sokol_gfx.h 345 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653
  1. #pragma once
  2. /*
  3. sokol_gfx.h -- simple 3D API wrapper
  4. Do this:
  5. #define SOKOL_IMPL
  6. before you include this file in *one* C or C++ file to create the
  7. implementation.
  8. In the same place define one of the following to select the rendering
  9. backend:
  10. #define SOKOL_GLCORE33
  11. #define SOKOL_GLES2
  12. #define SOKOL_GLES3
  13. #define SOKOL_D3D11
  14. #define SOKOL_METAL_MACOS
  15. #define SOKOL_METAL_IOS
  16. I.e. for the GL 3.3 Core Profile it should look like this:
  17. #include ...
  18. #include ...
  19. #define SOKOL_IMPL
  20. #define SOKOL_GLCORE33
  21. #include "sokol_gfx.h"
  22. To enable shader compilation support in the D3D11 backend:
  23. #define SOKOL_D3D11_SHADER_COMPILER
  24. If SOKOL_D3D11_SHADER_COMPILER is enabled, the executable will link against
  25. d3dcompiler.lib (d3dcompiler_47.dll).
  26. Optionally provide the following defines with your own implementations:
  27. SOKOL_ASSERT(c) - your own assert macro (default: assert(c))
  28. SOKOL_MALLOC(s) - your own malloc function (default: malloc(s))
  29. SOKOL_FREE(p) - your own free function (default: free(p))
  30. SOKOL_LOG(msg) - your own logging function (default: puts(msg))
  31. SOKOL_UNREACHABLE() - a guard macro for unreachable code (default: assert(false))
  32. API usage validation macros:
  33. SOKOL_VALIDATE_BEGIN() - begin a validation block (default: _sg_validate_begin())
  34. SOKOL_VALIDATE(cond, err) - like assert but for API validation (default: _sg_validate(cond, err))
  35. SOKOL_VALIDATE_END() - end a validation block, return true if all checks in block passed (default: bool _sg_validate())
  36. If you don't want validation errors to be fatal, define SOKOL_VALIDATE_NON_FATAL,
  37. be aware though that this may spam SOKOL_LOG messages.
  38. Optionally define the following to force debug checks and validations
  39. even in release mode:
  40. SOKOL_DEBUG - by default this is defined if _DEBUG is defined
  41. sokol_gfx DOES NOT:
  42. ===================
  43. - create a window or the 3D-API context/device, you must do this
  44. before sokol_gfx is initialized, and pass any required information
  45. (like 3D device pointers) to the sokol_gfx initialization call
  46. - present the rendered frame, how this is done exactly usually depends
  47. on how the window and 3D-API context/device was created
  48. - provide a unified shader language, instead 3D-API-specific shader
  49. source-code or shader-bytecode must be provided
  50. For complete code examples using the various backend 3D-APIs, see:
  51. https://github.com/floooh/sokol-samples
  52. STEP BY STEP
  53. ============
  54. --- to initialize sokol_gfx, after creating a window and a 3D-API
  55. context/device, call:
  56. sg_setup(const sg_desc*)
  57. --- create resource objects (at least buffers, shaders and pipelines,
  58. and optionally images and passes):
  59. sg_buffer sg_make_buffer(const sg_buffer_desc*)
  60. sg_image sg_make_image(const sg_image_desc*)
  61. sg_shader sg_make_shader(const sg_shader_desc*)
  62. sg_pipeline sg_make_pipeline(const sg_pipeline_desc*)
  63. sg_pass sg_make_pass(const sg_pass_desc*)
  64. --- start rendering to the default frame buffer with:
  65. sg_begin_default_pass(const sg_pass_action* actions, int width, int height)
  66. --- or start rendering to an offscreen framebuffer with:
  67. sg_begin_pass(sg_pass pass, const sg_pass_action* actions)
  68. --- fill an sg_draw_state struct with the resource bindings for the next
  69. draw call (one pipeline object, 1..N vertex buffers, 0 or 1
  70. index buffer, 0..N image objects to use as textures each on
  71. the vertex-shader- and fragment-shader-stage and then call
  72. sg_apply_draw_state(const sg_draw_state* draw_state)
  73. to update the resource bindings
  74. --- optionally update shader uniform data with:
  75. sg_apply_uniform_block(sg_shader_stage stage, int ub_index, const void* data, int num_bytes)
  76. --- kick off a draw call with:
  77. sg_draw(int base_element, int num_elements, int num_instances)
  78. --- finish the current rendering pass with:
  79. sg_end_pass()
  80. --- when done with the current frame, call
  81. sg_commit()
  82. --- at the end of your program, shutdown sokol_gfx with:
  83. sg_shutdown()
  84. --- if you need to destroy resources before sg_shutdown(), call:
  85. sg_destroy_buffer(sg_buffer buf)
  86. sg_destroy_image(sg_image img)
  87. sg_destroy_shader(sg_shader shd)
  88. sg_destroy_pipeline(sg_pipeline pip)
  89. sg_destroy_pass(sg_pass pass)
  90. --- to set a new viewport rectangle, call
  91. sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left)
  92. --- to set a new scissor rect, call:
  93. sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left)
  94. both sg_apply_viewport() and sg_apply_scissor_rect() must be called
  95. inside a rendering pass
  96. beginning a pass will reset the viewport to the size of the framebuffer used
  97. in the new pass,
  98. --- to update the content of buffer and image resources, call:
  99. sg_update_buffer(sg_buffer buf, const void* ptr, int num_bytes)
  100. sg_update_image(sg_image img, const sg_image_content* content)
  101. buffers and images to be updated must have been created with
  102. SG_USAGE_DYNAMIC or SG_USAGE_STREAM
  103. --- to check for support of optional features:
  104. bool sg_query_feature(sg_feature feature)
  105. --- if you need to call into the underlying 3D-API directly, you must call:
  106. sg_reset_state_cache()
  107. ...before calling sokol_gfx functions again
  108. BACKEND-SPECIFIC TOPICS:
  109. ========================
  110. --- the GL backends need to know about the internal structure of uniform
  111. blocks, and the texture sampler-name and -type:
  112. typedef struct {
  113. float mvp[16]; // model-view-projection matrix
  114. float offset0[2]; // some 2D vectors
  115. float offset1[2];
  116. float offset2[2];
  117. } params_t;
  118. // uniform block structure and texture image definition in sg_shader_desc:
  119. sg_shader_desc desc = {
  120. // uniform block description (size and internal structure)
  121. .vs.uniform_blocks[0] = {
  122. .size = sizeof(params_t),
  123. .uniforms = {
  124. [0] = { .name="mvp", .type=SG_UNIFORMTYPE_MAT4 },
  125. [1] = { .name="offset0", .type=SG_UNIFORMTYPE_VEC2 },
  126. ...
  127. }
  128. },
  129. // one texture on the fragment-shader-stage, GLES2/WebGL needs name and image type
  130. .fs.images[0] = { .name="tex", .type=SG_IMAGETYPE_ARRAY }
  131. ...
  132. };
  133. --- the Metal and D3D11 backends only need to know the size of uniform blocks,
  134. not their internal member structure, and they only need to know
  135. the type of a texture sampler, not its name:
  136. sg_shader_desc desc = {
  137. .vs.uniform_blocks[0].size = sizeof(params_t),
  138. .fs.images[0].type = SG_IMAGETYPE_ARRAY,
  139. ...
  140. };
  141. --- when creating a pipeline object, GLES2/WebGL need to know the vertex
  142. attribute names as used in the vertex shader when describing vertex
  143. layouts:
  144. sg_pipeline_desc desc = {
  145. .layout = {
  146. .attrs = {
  147. [0] = { .name="position", .format=SG_VERTEXFORMAT_FLOAT3 },
  148. [1] = { .name="color1", .format=SG_VERTEXFORMAT_FLOAT4 }
  149. }
  150. }
  151. };
  152. --- on D3D11 you need to provide a semantic name and semantic index in the
  153. vertex attribute definition instead (see the D3D11 documentation on
  154. D3D11_INPUT_ELEMENT_DESC for details):
  155. sg_pipeline_desc desc = {
  156. .layout = {
  157. .attrs = {
  158. [0] = { .sem_name="POSITION", .sem_index=0, .format=SG_VERTEXFORMAT_FLOAT3 },
  159. [1] = { .sem_name="COLOR", .sem_index=1, .format=SG_VERTEXFORMAT_FLOAT4 }
  160. }
  161. }
  162. };
  163. --- on Metal, GL 3.3 or GLES3/WebGL2, you don't need to provide an attribute
  164. name or semantic name, since vertex attributes can be bound by their slot index
  165. (this is mandatory in Metal, and optional in GL):
  166. sg_pipeline_desc desc = {
  167. .layout = {
  168. .attrs = {
  169. [0] = { .format=SG_VERTEXFORMAT_FLOAT3 },
  170. [1] = { .format=SG_VERTEXFORMAT_FLOAT4 }
  171. }
  172. }
  173. };
  174. TODO:
  175. ====
  176. - talk about asynchronous resource creation
  177. zlib/libpng license
  178. Copyright (c) 2018 Andre Weissflog
  179. This software is provided 'as-is', without any express or implied warranty.
  180. In no event will the authors be held liable for any damages arising from the
  181. use of this software.
  182. Permission is granted to anyone to use this software for any purpose,
  183. including commercial applications, and to alter it and redistribute it
  184. freely, subject to the following restrictions:
  185. 1. The origin of this software must not be misrepresented; you must not
  186. claim that you wrote the original software. If you use this software in a
  187. product, an acknowledgment in the product documentation would be
  188. appreciated but is not required.
  189. 2. Altered source versions must be plainly marked as such, and must not
  190. be misrepresented as being the original software.
  191. 3. This notice may not be removed or altered from any source
  192. distribution.
  193. */
  194. #include <stdint.h>
  195. #include <stdbool.h>
  196. #ifdef __cplusplus
  197. extern "C" {
  198. #endif
  199. /*
  200. Resource id typedefs:
  201. sg_buffer: vertex- and index-buffers
  202. sg_image: textures and render targets
  203. sg_shader: vertex- and fragment-shaders, uniform blocks
  204. sg_pipeline: associated shader and vertex-layouts, and render states
  205. sg_pass: a bundle of render targets and actions on them
  206. Instead of pointers, resource creation functions return a 32-bit
  207. number which uniquely identifies the resource object.
  208. The 32-bit resource id is split into a 16-bit pool index in the lower bits,
  209. and a 16-bit 'unique counter' in the upper bits. The index allows fast
  210. pool lookups, and combined with the unique-mask it allows to detect
  211. 'dangling accesses' (trying to use an object which no longer exists, and
  212. its pool slot has been reused for a new object)
  213. The resource ids are wrapped into a struct so that the compiler
  214. can complain when the wrong resource type is used.
  215. */
  216. typedef struct { uint32_t id; } sg_buffer;    /* opaque 32-bit handle: vertex- or index-buffer */
  217. typedef struct { uint32_t id; } sg_image;     /* opaque 32-bit handle: texture or render target */
  218. typedef struct { uint32_t id; } sg_shader;    /* opaque 32-bit handle: shader with uniform blocks */
  219. typedef struct { uint32_t id; } sg_pipeline;  /* opaque 32-bit handle: shader + vertex-layout + render states */
  220. typedef struct { uint32_t id; } sg_pass;      /* opaque 32-bit handle: bundle of render targets */
  221. /*
  222. various compile-time constants
  223. FIXME: it may make sense to convert some of those into defines so
  224. that the user code can override them.
  225. */
  226. enum {
  227. SG_INVALID_ID = 0,                  /* the invalid resource id (never a valid handle) */
  228. SG_NUM_SHADER_STAGES = 2,           /* vertex- and fragment-stage */
  229. SG_NUM_INFLIGHT_FRAMES = 2,
  230. SG_MAX_COLOR_ATTACHMENTS = 4,       /* max color attachments per pass */
  231. SG_MAX_SHADERSTAGE_BUFFERS = 4,     /* max vertex buffers in a draw state */
  232. SG_MAX_SHADERSTAGE_IMAGES = 12,     /* max images per shader stage */
  233. SG_MAX_SHADERSTAGE_UBS = 4,         /* max uniform blocks per shader stage */
  234. SG_MAX_UB_MEMBERS = 16,             /* max members per uniform block */
  235. SG_MAX_VERTEX_ATTRIBUTES = 16,
  236. SG_MAX_MIPMAPS = 16,
  237. SG_MAX_TEXTUREARRAY_LAYERS = 128
  238. };
  239. /*
  240. sg_feature
  241. These are optional features, use the function
  242. sg_query_feature() to check whether the feature is supported.
  243. */
  244. typedef enum {
  245. SG_FEATURE_INSTANCING,                  /* instanced rendering */
  246. SG_FEATURE_TEXTURE_COMPRESSION_DXT,     /* DXT compressed pixel formats */
  247. SG_FEATURE_TEXTURE_COMPRESSION_PVRTC,   /* PVRTC compressed pixel formats */
  248. SG_FEATURE_TEXTURE_COMPRESSION_ATC,     /* ATC compressed pixel formats */
  249. SG_FEATURE_TEXTURE_COMPRESSION_ETC2,    /* ETC2 compressed pixel formats */
  250. SG_FEATURE_TEXTURE_FLOAT,               /* 32-bit float pixel formats */
  251. SG_FEATURE_TEXTURE_HALF_FLOAT,          /* 16-bit float pixel formats */
  252. SG_FEATURE_ORIGIN_BOTTOM_LEFT,          /* framebuffer origin is bottom-left */
  253. SG_FEATURE_ORIGIN_TOP_LEFT,             /* framebuffer origin is top-left */
  254. SG_FEATURE_MSAA_RENDER_TARGETS,         /* multisampled offscreen render targets */
  255. SG_FEATURE_PACKED_VERTEX_FORMAT_10_2,   /* SG_VERTEXFORMAT_UINT10_N2 support */
  256. SG_FEATURE_MULTIPLE_RENDER_TARGET,      /* passes with more than one color attachment */
  257. SG_FEATURE_IMAGETYPE_3D,                /* SG_IMAGETYPE_3D support */
  258. SG_FEATURE_IMAGETYPE_ARRAY,             /* SG_IMAGETYPE_ARRAY support */
  259. SG_NUM_FEATURES                         /* number of features, not a feature itself */
  260. } sg_feature;
  261. /*
  262. sg_resource_state
  263. The current state of a resource in its resource pool.
  264. Resources start in the INITIAL state, which means the
  265. pool slot is unoccupied and can be allocated. When a resource is
  266. created, first an id is allocated, and the resource pool slot
  267. is set to state ALLOC. After allocation, the resource is
  268. initialized, which may result in the VALID or FAILED state. The
  269. reason why allocation and initialization are separate is because
  270. some resource types (e.g. buffers and images) might be asynchronously
  271. initialized by the user application. If a resource which is not
  272. in the VALID state is attempted to be used for rendering, rendering
  273. operations will silently be dropped.
  274. The special INVALID state is returned in sg_query_xxx_state() if no
  275. resource object exists for the provided resource id.
  276. */
  277. typedef enum {
  278. SG_RESOURCESTATE_INITIAL,   /* pool slot unoccupied, can be allocated */
  279. SG_RESOURCESTATE_ALLOC,     /* id allocated, resource not yet initialized */
  280. SG_RESOURCESTATE_VALID,     /* initialization succeeded, usable for rendering */
  281. SG_RESOURCESTATE_FAILED,    /* initialization failed */
  282. SG_RESOURCESTATE_INVALID,   /* query result when no resource exists for an id */
  283. _SG_RESOURCESTATE_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  284. } sg_resource_state;
  285. /*
  286. sg_usage
  287. A resource usage hint describing the update strategy of
  288. buffers and images. This is used in the sg_buffer_desc.usage
  289. and sg_image_desc.usage members when creating buffers
  290. and images:
  291. SG_USAGE_IMMUTABLE: the resource will never be updated with
  292. new data, instead the data content of the
  293. resource must be provided on creation
  294. SG_USAGE_DYNAMIC: the resource will be updated infrequently
  295. with new data (this could range from "once
  296. after creation", to "quite often but not
  297. every frame")
  298. SG_USAGE_STREAM: the resource will be updated each frame
  299. with new content
  300. The rendering backends use this hint to prevent that the
  301. CPU needs to wait for the GPU when attempting to update
  302. a resource that might be currently accessed by the GPU.
  303. Resource content is updated with the function sg_update_buffer() for
  304. buffer objects, and sg_update_image() for image objects. Only
  305. one update is allowed per frame and resource object. The
  306. application must update all data required for rendering (this
  307. means that the update data can be smaller than the resource size,
  308. if only a part of the overall resource size is used for rendering,
  309. you only need to make sure that the data that *is* used is valid).
  310. The default usage is SG_USAGE_IMMUTABLE.
  311. */
  312. typedef enum {
  313. _SG_USAGE_DEFAULT, /* value 0 reserved for default-init */
  314. SG_USAGE_IMMUTABLE,     /* content provided on creation, never updated (the default) */
  315. SG_USAGE_DYNAMIC,       /* updated infrequently with new data */
  316. SG_USAGE_STREAM,        /* updated each frame with new content */
  317. _SG_USAGE_NUM,          /* number of valid entries */
  318. _SG_USAGE_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  319. } sg_usage;
  320. /*
  321. sg_buffer_type
  322. This indicates whether a buffer contains vertex- or index-data,
  323. used in the sg_buffer_desc.type member when creating a buffer.
  324. The default value is SG_BUFFERTYPE_VERTEXBUFFER.
  325. */
  326. typedef enum {
  327. _SG_BUFFERTYPE_DEFAULT, /* value 0 reserved for default-init */
  328. SG_BUFFERTYPE_VERTEXBUFFER,     /* buffer holds vertex data (the default) */
  329. SG_BUFFERTYPE_INDEXBUFFER,      /* buffer holds index data */
  330. _SG_BUFFERTYPE_NUM,             /* number of valid entries */
  331. _SG_BUFFERTYPE_FORCE_U32 = 0x7FFFFFFF   /* force enum to 32 bits */
  332. } sg_buffer_type;
  333. /*
  334. sg_index_type
  335. Indicates whether indexed rendering (fetching vertex-indices from an
  336. index buffer) is used, and if yes, the index data type (16- or 32-bits).
  337. This is used in the sg_pipeline_desc.index_type member when creating a
  338. pipeline object.
  339. The default index type is SG_INDEXTYPE_NONE.
  340. */
  341. typedef enum {
  342. _SG_INDEXTYPE_DEFAULT, /* value 0 reserved for default-init */
  343. SG_INDEXTYPE_NONE,      /* non-indexed rendering (the default) */
  344. SG_INDEXTYPE_UINT16,    /* 16-bit indices */
  345. SG_INDEXTYPE_UINT32,    /* 32-bit indices */
  346. _SG_INDEXTYPE_NUM,      /* number of valid entries */
  347. _SG_INDEXTYPE_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  348. } sg_index_type;
  349. /*
  350. sg_image_type
  351. Indicates the basic image type (2D-texture, cubemap, 3D-texture
  352. or 2D-array-texture). 3D- and array-textures are not supported
  353. on the GLES2/WebGL backend. The image type is used in the
  354. sg_image_desc.type member when creating an image.
  355. The default image type when creating an image is SG_IMAGETYPE_2D.
  356. */
  357. typedef enum {
  358. _SG_IMAGETYPE_DEFAULT, /* value 0 reserved for default-init */
  359. SG_IMAGETYPE_2D,        /* 2D texture (the default) */
  360. SG_IMAGETYPE_CUBE,      /* cubemap texture */
  361. SG_IMAGETYPE_3D,        /* 3D texture (not supported on GLES2/WebGL) */
  362. SG_IMAGETYPE_ARRAY,     /* 2D-array texture (not supported on GLES2/WebGL) */
  363. _SG_IMAGETYPE_NUM,      /* number of valid entries */
  364. _SG_IMAGETYPE_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  365. } sg_image_type;
  366. /*
  367. sg_cube_face
  368. The cubemap faces. Use these as indices in the sg_image_desc.content
  369. array.
  370. */
  371. typedef enum {
  372. SG_CUBEFACE_POS_X,      /* +X face */
  373. SG_CUBEFACE_NEG_X,      /* -X face */
  374. SG_CUBEFACE_POS_Y,      /* +Y face */
  375. SG_CUBEFACE_NEG_Y,      /* -Y face */
  376. SG_CUBEFACE_POS_Z,      /* +Z face */
  377. SG_CUBEFACE_NEG_Z,      /* -Z face */
  378. SG_CUBEFACE_NUM,        /* number of cubemap faces (6) */
  379. _SG_CUBEFACE_FORCE_U32 = 0x7FFFFFFF     /* force enum to 32 bits */
  380. } sg_cube_face;
  381. /*
  382. sg_shader_stage
  383. There are 2 shader stages: vertex- and fragment-shader-stage.
  384. Each shader stage consists of:
  385. - one slot for a shader function (provided as source- or byte-code)
  386. - SG_MAX_SHADERSTAGE_UBS slots for uniform blocks
  387. - SG_MAX_SHADERSTAGE_IMAGES slots for images used as textures by
  388. the shader function
  389. */
  390. typedef enum {
  391. SG_SHADERSTAGE_VS,      /* vertex shader stage */
  392. SG_SHADERSTAGE_FS,      /* fragment shader stage */
  393. _SG_SHADERSTAGE_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  394. } sg_shader_stage;
  395. /*
  396. sg_pixel_format
  397. This is a common subset of useful and widely supported pixel formats. The
  398. pixel format enum is mainly used when creating an image object in the
  399. sg_image_desc.pixel_format member.
  400. The default pixel format when creating an image is SG_PIXELFORMAT_RGBA8.
  401. */
  402. typedef enum {
  403. _SG_PIXELFORMAT_DEFAULT, /* value 0 reserved for default-init */
  404. SG_PIXELFORMAT_NONE,            /* explicit 'no format' */
  405. SG_PIXELFORMAT_RGBA8,           /* the default format for images */
  406. SG_PIXELFORMAT_RGB8,
  407. SG_PIXELFORMAT_RGBA4,
  408. SG_PIXELFORMAT_R5G6B5,
  409. SG_PIXELFORMAT_R5G5B5A1,
  410. SG_PIXELFORMAT_R10G10B10A2,
  411. SG_PIXELFORMAT_RGBA32F,         /* float formats, see SG_FEATURE_TEXTURE_FLOAT */
  412. SG_PIXELFORMAT_RGBA16F,         /* half-float, see SG_FEATURE_TEXTURE_HALF_FLOAT */
  413. SG_PIXELFORMAT_R32F,
  414. SG_PIXELFORMAT_R16F,
  415. SG_PIXELFORMAT_L8,              /* 8-bit luminance */
  416. SG_PIXELFORMAT_DXT1,            /* DXT compressed, see SG_FEATURE_TEXTURE_COMPRESSION_DXT */
  417. SG_PIXELFORMAT_DXT3,
  418. SG_PIXELFORMAT_DXT5,
  419. SG_PIXELFORMAT_DEPTH,           /* depth render target format */
  420. SG_PIXELFORMAT_DEPTHSTENCIL,    /* combined depth-stencil render target format */
  421. SG_PIXELFORMAT_PVRTC2_RGB,      /* PVRTC compressed, see SG_FEATURE_TEXTURE_COMPRESSION_PVRTC */
  422. SG_PIXELFORMAT_PVRTC4_RGB,
  423. SG_PIXELFORMAT_PVRTC2_RGBA,
  424. SG_PIXELFORMAT_PVRTC4_RGBA,
  425. SG_PIXELFORMAT_ETC2_RGB8,       /* ETC2 compressed, see SG_FEATURE_TEXTURE_COMPRESSION_ETC2 */
  426. SG_PIXELFORMAT_ETC2_SRGB8,
  427. _SG_PIXELFORMAT_NUM,            /* number of valid entries */
  428. _SG_PIXELFORMAT_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  429. } sg_pixel_format;
  430. /*
  431. sg_primitive_type
  432. This is the common subset of 3D primitive types supported across all 3D
  433. APIs. This is used in the sg_pipeline_desc.primitive_type member when
  434. creating a pipeline object.
  435. The default primitive type is SG_PRIMITIVETYPE_TRIANGLES.
  436. */
  437. typedef enum {
  438. _SG_PRIMITIVETYPE_DEFAULT, /* value 0 reserved for default-init */
  439. SG_PRIMITIVETYPE_POINTS,
  440. SG_PRIMITIVETYPE_LINES,
  441. SG_PRIMITIVETYPE_LINE_STRIP,
  442. SG_PRIMITIVETYPE_TRIANGLES,         /* the default primitive type */
  443. SG_PRIMITIVETYPE_TRIANGLE_STRIP,
  444. _SG_PRIMITIVETYPE_NUM,              /* number of valid entries */
  445. _SG_PRIMITIVETYPE_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  446. } sg_primitive_type;
  447. /*
  448. sg_filter
  449. The filtering mode when sampling a texture image. This is
  450. used in the sg_image_desc.min_filter and sg_image_desc.mag_filter
  451. members when creating an image object.
  452. The default filter mode is SG_FILTER_NEAREST.
  453. */
  454. typedef enum {
  455. _SG_FILTER_DEFAULT, /* value 0 reserved for default-init */
  456. SG_FILTER_NEAREST,                  /* the default filter mode */
  457. SG_FILTER_LINEAR,
  458. SG_FILTER_NEAREST_MIPMAP_NEAREST,   /* _MIPMAP_ variants select between mipmap levels */
  459. SG_FILTER_NEAREST_MIPMAP_LINEAR,
  460. SG_FILTER_LINEAR_MIPMAP_NEAREST,
  461. SG_FILTER_LINEAR_MIPMAP_LINEAR,
  462. _SG_FILTER_NUM,                     /* number of valid entries */
  463. _SG_FILTER_FORCE_U32 = 0x7FFFFFFF   /* force enum to 32 bits */
  464. } sg_filter;
  465. /*
  466. sg_wrap
  467. The texture coordinates wrapping mode when sampling a texture
  468. image. This is used in the sg_image_desc.wrap_u, .wrap_v
  469. and .wrap_w members when creating an image.
  470. The default wrap mode is SG_WRAP_REPEAT.
  471. */
  472. typedef enum {
  473. _SG_WRAP_DEFAULT, /* value 0 reserved for default-init */
  474. SG_WRAP_REPEAT,             /* the default wrap mode */
  475. SG_WRAP_CLAMP_TO_EDGE,
  476. SG_WRAP_MIRRORED_REPEAT,
  477. _SG_WRAP_NUM,               /* number of valid entries */
  478. _SG_WRAP_FORCE_U32 = 0x7FFFFFFF     /* force enum to 32 bits */
  479. } sg_wrap;
  480. /*
  481. sg_vertex_format
  482. The data type of a vertex component. This is used to describe
  483. the layout of vertex data when creating a pipeline object.
  484. */
  485. typedef enum {
  486. SG_VERTEXFORMAT_INVALID,        /* value 0: invalid/unset, a format must be provided */
  487. SG_VERTEXFORMAT_FLOAT,
  488. SG_VERTEXFORMAT_FLOAT2,
  489. SG_VERTEXFORMAT_FLOAT3,
  490. SG_VERTEXFORMAT_FLOAT4,
  491. SG_VERTEXFORMAT_BYTE4,
  492. SG_VERTEXFORMAT_BYTE4N,         /* 'N' suffix: normalized integer components */
  493. SG_VERTEXFORMAT_UBYTE4,
  494. SG_VERTEXFORMAT_UBYTE4N,
  495. SG_VERTEXFORMAT_SHORT2,
  496. SG_VERTEXFORMAT_SHORT2N,
  497. SG_VERTEXFORMAT_SHORT4,
  498. SG_VERTEXFORMAT_SHORT4N,
  499. SG_VERTEXFORMAT_UINT10_N2,      /* packed, see SG_FEATURE_PACKED_VERTEX_FORMAT_10_2 */
  500. _SG_VERTEXFORMAT_NUM,           /* number of valid entries */
  501. _SG_VERTEXFORMAT_FORCE_U32 = 0x7FFFFFFF /* force enum to 32 bits */
  502. } sg_vertex_format;
  503. /*
  504. sg_vertex_step
  505. Defines whether the input pointer of a vertex input stream is advanced
  506. 'per vertex' or 'per instance'. The default step-func is
  507. SG_VERTEXSTEP_PER_VERTEX. SG_VERTEXSTEP_PER_INSTANCE is used with
  508. instanced-rendering.
  509. The vertex-step is part of the vertex-layout definition
  510. when creating pipeline objects.
  511. */
  512. typedef enum {
  513. _SG_VERTEXSTEP_DEFAULT, /* value 0 reserved for default-init */
  514. SG_VERTEXSTEP_PER_VERTEX,       /* advance input pointer per vertex (the default) */
  515. SG_VERTEXSTEP_PER_INSTANCE,     /* advance input pointer per instance (instanced rendering) */
  516. _SG_VERTEXSTEP_NUM,             /* number of valid entries */
  517. _SG_VERTEXSTEP_FORCE_U32 = 0x7FFFFFFF   /* force enum to 32 bits */
  518. } sg_vertex_step;
  519. /*
  520. sg_uniform_type
  521. The data type of a uniform block member. This is used to
  522. describe the internal layout of uniform blocks when creating
  523. a shader object.
  524. */
  525. typedef enum {
  526. SG_UNIFORMTYPE_INVALID,     /* value 0: invalid/unset, a type must be provided */
  527. SG_UNIFORMTYPE_FLOAT,
  528. SG_UNIFORMTYPE_FLOAT2,
  529. SG_UNIFORMTYPE_FLOAT3,
  530. SG_UNIFORMTYPE_FLOAT4,
  531. SG_UNIFORMTYPE_MAT4,        /* 4x4 matrix */
  532. _SG_UNIFORMTYPE_NUM,        /* number of valid entries */
  533. _SG_UNIFORMTYPE_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  534. } sg_uniform_type;
  535. /*
  536. sg_cull_mode
  537. The face-culling mode, this is used in the
  538. sg_pipeline_desc.rasterizer.cull_mode member when creating a
  539. pipeline object.
  540. The default cull mode is SG_CULLMODE_NONE
  541. */
  542. typedef enum {
  543. _SG_CULLMODE_DEFAULT, /* value 0 reserved for default-init */
  544. SG_CULLMODE_NONE,       /* no face culling (the default) */
  545. SG_CULLMODE_FRONT,      /* cull front-facing primitives */
  546. SG_CULLMODE_BACK,       /* cull back-facing primitives */
  547. _SG_CULLMODE_NUM,       /* number of valid entries */
  548. _SG_CULLMODE_FORCE_U32 = 0x7FFFFFFF     /* force enum to 32 bits */
  549. } sg_cull_mode;
  550. /*
  551. sg_face_winding
  552. The vertex-winding rule that determines a front-facing primitive. This
  553. is used in the member sg_pipeline_desc.rasterizer.face_winding
  554. when creating a pipeline object.
  555. The default winding is SG_FACEWINDING_CW (clockwise)
  556. */
  557. typedef enum {
  558. _SG_FACEWINDING_DEFAULT, /* value 0 reserved for default-init */
  559. SG_FACEWINDING_CCW,     /* counter-clockwise is front-facing */
  560. SG_FACEWINDING_CW,      /* clockwise is front-facing (the default) */
  561. _SG_FACEWINDING_NUM,    /* number of valid entries */
  562. _SG_FACEWINDING_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  563. } sg_face_winding;
  564. /*
  565. sg_compare_func
  566. The compare-function for depth- and stencil-ref tests.
  567. This is used when creating pipeline objects in the members:
  568. sg_pipeline_desc
  569. .depth_stencil
  570. .depth_compare_func
  571. .stencil_front.compare_func
  572. .stencil_back.compare_func
  573. The default compare func for depth- and stencil-tests is
  574. SG_COMPAREFUNC_ALWAYS.
  575. */
  576. typedef enum {
  577. _SG_COMPAREFUNC_DEFAULT, /* value 0 reserved for default-init */
  578. SG_COMPAREFUNC_NEVER,
  579. SG_COMPAREFUNC_LESS,
  580. SG_COMPAREFUNC_EQUAL,
  581. SG_COMPAREFUNC_LESS_EQUAL,
  582. SG_COMPAREFUNC_GREATER,
  583. SG_COMPAREFUNC_NOT_EQUAL,
  584. SG_COMPAREFUNC_GREATER_EQUAL,
  585. SG_COMPAREFUNC_ALWAYS,          /* the default for depth- and stencil-tests */
  586. _SG_COMPAREFUNC_NUM,            /* number of valid entries */
  587. _SG_COMPAREFUNC_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  588. } sg_compare_func;
  589. /*
  590. sg_stencil_op
  591. The operation performed on a currently stored stencil-value when a
  592. comparison test passes or fails. This is used when creating a pipeline
  593. object in the members:
  594. sg_pipeline_desc
  595. .depth_stencil
  596. .stencil_front
  597. .fail_op
  598. .depth_fail_op
  599. .pass_op
  600. .stencil_back
  601. .fail_op
  602. .depth_fail_op
  603. .pass_op
  604. The default value is SG_STENCILOP_KEEP.
  605. */
  606. typedef enum {
  607. _SG_STENCILOP_DEFAULT, /* value 0 reserved for default-init */
  608. SG_STENCILOP_KEEP,          /* keep stored stencil value (the default) */
  609. SG_STENCILOP_ZERO,          /* set stencil value to zero */
  610. SG_STENCILOP_REPLACE,       /* replace with reference value */
  611. SG_STENCILOP_INCR_CLAMP,    /* increment, clamp at max */
  612. SG_STENCILOP_DECR_CLAMP,    /* decrement, clamp at zero */
  613. SG_STENCILOP_INVERT,        /* bitwise invert */
  614. SG_STENCILOP_INCR_WRAP,     /* increment with wrap-around */
  615. SG_STENCILOP_DECR_WRAP,     /* decrement with wrap-around */
  616. _SG_STENCILOP_NUM,          /* number of valid entries */
  617. _SG_STENCILOP_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  618. } sg_stencil_op;
  619. /*
  620. sg_blend_factor
  621. The source and destination factors in blending operations.
  622. This is used in the following members when creating a pipeline object:
  623. sg_pipeline_desc
  624. .blend
  625. .src_factor_rgb
  626. .dst_factor_rgb
  627. .src_factor_alpha
  628. .dst_factor_alpha
  629. The default value is SG_BLENDFACTOR_ONE for source
  630. factors, and SG_BLENDFACTOR_ZERO for destination factors.
  631. */
  632. typedef enum {
  633. _SG_BLENDFACTOR_DEFAULT, /* value 0 reserved for default-init */
  634. SG_BLENDFACTOR_ZERO,                    /* the default for destination factors */
  635. SG_BLENDFACTOR_ONE,                     /* the default for source factors */
  636. SG_BLENDFACTOR_SRC_COLOR,
  637. SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR,
  638. SG_BLENDFACTOR_SRC_ALPHA,
  639. SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA,
  640. SG_BLENDFACTOR_DST_COLOR,
  641. SG_BLENDFACTOR_ONE_MINUS_DST_COLOR,
  642. SG_BLENDFACTOR_DST_ALPHA,
  643. SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA,
  644. SG_BLENDFACTOR_SRC_ALPHA_SATURATED,
  645. SG_BLENDFACTOR_BLEND_COLOR,             /* _BLEND_ variants use the constant blend color */
  646. SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR,
  647. SG_BLENDFACTOR_BLEND_ALPHA,
  648. SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA,
  649. _SG_BLENDFACTOR_NUM,                    /* number of valid entries */
  650. _SG_BLENDFACTOR_FORCE_U32 = 0x7FFFFFFF  /* force enum to 32 bits */
  651. } sg_blend_factor;
  652. /*
  653. sg_blend_op
  654. Describes how the source and destination values are combined in the
  655. fragment blending operation. It is used in the following members when
  656. creating a pipeline object:
  657. sg_pipeline_desc
  658. .blend
  659. .op_rgb
  660. .op_alpha
  661. The default value is SG_BLENDOP_ADD.
  662. */
  663. typedef enum {
  664. _SG_BLENDOP_DEFAULT, /* value 0 reserved for default-init */
  665. SG_BLENDOP_ADD,                 /* src + dst (the default) */
  666. SG_BLENDOP_SUBTRACT,            /* src - dst */
  667. SG_BLENDOP_REVERSE_SUBTRACT,    /* dst - src */
  668. _SG_BLENDOP_NUM,                /* number of valid entries */
  669. _SG_BLENDOP_FORCE_U32 = 0x7FFFFFFF      /* force enum to 32 bits */
  670. } sg_blend_op;
  671. /*
  672. sg_color_mask
  673. Selects the color channels when writing a fragment color to the
  674. framebuffer. This is used in the members
  675. sg_pipeline_desc.blend.color_write_mask when creating a pipeline object.
  676. The default colormask is SG_COLORMASK_RGBA (write all color channels)
  677. */
  678. typedef enum {
  679. _SG_COLORMASK_DEFAULT = 0, /* value 0 reserved for default-init */
  680. SG_COLORMASK_NONE = (0x10), /* special value for 'all channels disabled' */
  681. SG_COLORMASK_R = (1<<0),    /* write red channel */
  682. SG_COLORMASK_G = (1<<1),    /* write green channel */
  683. SG_COLORMASK_B = (1<<2),    /* write blue channel */
  684. SG_COLORMASK_A = (1<<3),    /* write alpha channel */
  685. SG_COLORMASK_RGB = 0x7,     /* R|G|B */
  686. SG_COLORMASK_RGBA = 0xF,    /* R|G|B|A (the default) */
  687. _SG_COLORMASK_FORCE_U32 = 0x7FFFFFFF    /* force enum to 32 bits */
  688. } sg_color_mask;
  689. /*
  690. sg_action
  691. Defines what action should be performed at the start of a render pass:
  692. SG_ACTION_CLEAR: clear the render target image
  693. SG_ACTION_LOAD: load the previous content of the render target image
  694. SG_ACTION_DONTCARE: leave the render target image content undefined
  695. This is used in the sg_pass_action structure.
  696. The default action for all pass attachments is SG_ACTION_CLEAR, with the
  697. clear color rgba = {0.5f, 0.5f, 0.5f, 1.0f}, depth=1.0 and stencil=0.
  698. If you want to override the default behaviour, it is important to not
  699. only set the clear color, but the 'action' field as well (as long as this
  700. is in its _SG_ACTION_DEFAULT, the value fields will be ignored).
  701. */
  702. typedef enum {
  703. _SG_ACTION_DEFAULT,     /* value 0 reserved for default-init */
  704. SG_ACTION_CLEAR,        /* clear the render target image (the default) */
  705. SG_ACTION_LOAD,         /* load previous content of the render target image */
  706. SG_ACTION_DONTCARE,     /* leave render target image content undefined */
  707. _SG_ACTION_NUM,         /* number of valid entries */
  708. _SG_ACTION_FORCE_U32 = 0x7FFFFFFF   /* force enum to 32 bits */
  709. } sg_action;
  710. /*
  711. sg_pass_action
  712. The sg_pass_action struct defines the actions to be performed
  713. at the start of a rendering pass in the functions sg_begin_pass()
  714. and sg_begin_default_pass().
  715. A separate action and clear values can be defined for each
  716. color attachment, and for the depth-stencil attachment.
  717. The default clear values are defined by the macros:
  718. - SG_DEFAULT_CLEAR_RED: 0.5f
  719. - SG_DEFAULT_CLEAR_GREEN: 0.5f
  720. - SG_DEFAULT_CLEAR_BLUE: 0.5f
  721. - SG_DEFAULT_CLEAR_ALPHA: 1.0f
  722. - SG_DEFAULT_CLEAR_DEPTH: 1.0f
  723. - SG_DEFAULT_CLEAR_STENCIL: 0
  724. */
  725. typedef struct {
  726. sg_action action;   /* what to do with this color attachment at pass start */
  727. float val[4];       /* clear color rgba (used when action is SG_ACTION_CLEAR) */
  728. } sg_color_attachment_action;
  729. typedef struct {
  730. sg_action action;   /* what to do with the depth attachment at pass start */
  731. float val;          /* clear depth value (used when action is SG_ACTION_CLEAR) */
  732. } sg_depth_attachment_action;
  733. typedef struct {
  734. sg_action action;   /* what to do with the stencil attachment at pass start */
  735. uint8_t val;        /* clear stencil value (used when action is SG_ACTION_CLEAR) */
  736. } sg_stencil_attachment_action;
  737. typedef struct {
  738. uint32_t _start_canary;     /* canary value, presumably checked by API validation -- internal */
  739. sg_color_attachment_action colors[SG_MAX_COLOR_ATTACHMENTS];    /* one action per color attachment */
  740. sg_depth_attachment_action depth;       /* action for the depth attachment */
  741. sg_stencil_attachment_action stencil;   /* action for the stencil attachment */
  742. uint32_t _end_canary;       /* canary value, presumably checked by API validation -- internal */
  743. } sg_pass_action;
  744. /*
  745. sg_draw_state
  746. The sg_draw_state structure defines the resource binding slots
  747. of the sokol_gfx render pipeline, used as argument to the
  748. sg_apply_draw_state() function.
  749. A draw state contains:
  750. - 1 pipeline object
  751. - 1..N vertex buffers
  752. - 0..1 index buffers
  753. - 0..N vertex shader stage images
  754. - 0..N fragment shader stage images
  755. The max number of vertex buffer and shader stage images
  756. are defined by the SG_MAX_SHADERSTAGE_BUFFERS and
  757. SG_MAX_SHADERSTAGE_IMAGES configuration constants.
  758. */
/* sg_draw_state: resource binding slots, argument to sg_apply_draw_state() */
typedef struct {
    uint32_t _start_canary;                                 /* validation canary, must remain 0 */
    sg_pipeline pipeline;                                   /* the pipeline object (required) */
    sg_buffer vertex_buffers[SG_MAX_SHADERSTAGE_BUFFERS];   /* 1..N vertex buffers */
    sg_buffer index_buffer;                                 /* 0..1 index buffer */
    sg_image vs_images[SG_MAX_SHADERSTAGE_IMAGES];          /* 0..N vertex-shader stage images */
    sg_image fs_images[SG_MAX_SHADERSTAGE_IMAGES];          /* 0..N fragment-shader stage images */
    uint32_t _end_canary;                                   /* validation canary, must remain 0 */
} sg_draw_state;
  768. /*
  769. sg_desc
  770. The sg_desc struct contains configuration values for sokol_gfx,
  771. it is used as parameter to the sg_setup() call.
  772. The default configuration is:
  773. .buffer_pool_size: 128
  774. .image_pool_size: 128
  775. .shader_pool_size: 32
  776. .pipeline_pool_size: 64
  777. .pass_pool_size: 16
  778. GL specific:
  779. .gl_force_gles2
  780. if this is true the GL backend will act in "GLES2 fallback mode" even
  781. when compiled with SOKOL_GLES3, this is useful to fall back
  782. to traditional WebGL if a browser doesn't support a WebGL2 context
  783. Metal specific:
  784. (NOTE: All Objective-C object references are transferred through
a bridged (const void*) to sokol_gfx, which will use an unretained
bridged cast (__bridge id<xxx>) to retrieve the Objective-C
  787. references back. Since the bridge cast is unretained, the caller
  788. must hold a strong reference to the Objective-C object for the
duration of the sokol_gfx call!)
  790. .mtl_device
  791. a pointer to the MTLDevice object
  792. .mtl_renderpass_descriptor_cb
  793. a C callback function to obtain the MTLRenderPassDescriptor for the
  794. current frame when rendering to the default framebuffer, will be called
  795. in sg_begin_default_pass()
  796. .mtl_drawable_cb
  797. a C callback function to obtain a MTLDrawable for the current
  798. frame when rendering to the default framebuffer, will be called in
  799. sg_end_pass() of the default pass
  800. .mtl_global_uniform_buffer_size
  801. the size of the global uniform buffer in bytes, this must be big
  802. enough to hold all uniform block updates for a single frame,
  803. the default value is 4 MByte (4 * 1024 * 1024)
  804. .mtl_sampler_cache_size
  805. the number of slots in the sampler cache, the Metal backend
  806. will share texture samplers with the same state in this
  807. cache, the default value is 64
  808. D3D11 specific:
  809. .d3d11_device
  810. a pointer to the ID3D11Device object, this must have been created
  811. before sg_setup() is called
  812. .d3d11_device_context
  813. a pointer to the ID3D11DeviceContext object
  814. .d3d11_render_target_view_cb
  815. a C callback function to obtain a pointer to the current
  816. ID3D11RenderTargetView object of the default framebuffer,
  817. this function will be called in sg_begin_pass() when rendering
  818. to the default framebuffer
  819. .d3d11_depth_stencil_view_cb
  820. a C callback function to obtain a pointer to the current
  821. ID3D11DepthStencilView object of the default framebuffer,
  822. this function will be called in sg_begin_pass() when rendering
  823. to the default framebuffer
  824. */
/* sg_desc: configuration for sg_setup() (see documentation block above for details) */
typedef struct {
    uint32_t _start_canary;     /* validation canary, must remain 0 */
    int buffer_pool_size;       /* default: 128 */
    int image_pool_size;        /* default: 128 */
    int shader_pool_size;       /* default: 32 */
    int pipeline_pool_size;     /* default: 64 */
    int pass_pool_size;         /* default: 16 */
    /* GL specific */
    bool gl_force_gles2;        /* act in GLES2-fallback mode even when compiled with SOKOL_GLES3 */
    /* Metal-specific */
    const void* mtl_device;                             /* bridged MTLDevice pointer */
    const void* (*mtl_renderpass_descriptor_cb)(void);  /* returns MTLRenderPassDescriptor for the current default-framebuffer frame */
    const void* (*mtl_drawable_cb)(void);               /* returns MTLDrawable for the current default-framebuffer frame */
    int mtl_global_uniform_buffer_size;                 /* default: 4 MByte (4 * 1024 * 1024) */
    int mtl_sampler_cache_size;                         /* default: 64 */
    /* D3D11-specific */
    const void* d3d11_device;                           /* ID3D11Device pointer, must exist before sg_setup() */
    const void* d3d11_device_context;                   /* ID3D11DeviceContext pointer */
    const void* (*d3d11_render_target_view_cb)(void);   /* returns current default-framebuffer ID3D11RenderTargetView */
    const void* (*d3d11_depth_stencil_view_cb)(void);   /* returns current default-framebuffer ID3D11DepthStencilView */
    uint32_t _end_canary;       /* validation canary, must remain 0 */
} sg_desc;
  847. /*
  848. sg_buffer_desc
  849. Creation parameters for sg_buffer objects, used in the
  850. sg_make_buffer() call.
  851. The default configuration is:
  852. .size: 0 (this *must* be set to a valid size in bytes)
  853. .type: SG_BUFFERTYPE_VERTEXBUFFER
  854. .usage: SG_USAGE_IMMUTABLE
  855. .content 0
  856. Buffers with the SG_USAGE_IMMUTABLE usage *must* fill the buffer
  857. with initial data (.content must point to a data chunk with
  858. exactly .size bytes).
  859. ADVANCED TOPIC: Injecting native 3D-API buffers:
  860. The following struct members allow to inject your own GL, Metal
  861. or D3D11 buffers into sokol_gfx:
  862. .gl_buffers[SG_NUM_INFLIGHT_FRAMES]
  863. .mtl_buffers[SG_NUM_INFLIGHT_FRAMES]
  864. .d3d11_buffer
  865. You must still provide all other members except the .content member, and
  866. these must match the creation parameters of the native buffers you
  867. provide. For SG_USAGE_IMMUTABLE, only provide a single native 3D-API
  868. buffer, otherwise you need to provide SG_NUM_INFLIGHT_FRAMES buffers
  869. (only for GL and Metal, not D3D11). Providing multiple buffers for GL and
  870. Metal is necessary because sokol_gfx will rotate through them when
  871. calling sg_update_buffer() to prevent lock-stalls.
  872. Note that it is expected that immutable injected buffer have already been
  873. initialized with content, and the .content member must be 0!
  874. Also you need to call sg_reset_state_cache() after calling native 3D-API
  875. functions, and before calling any sokol_gfx function.
  876. */
/* sg_buffer_desc: creation parameters for sg_make_buffer() (see documentation block above) */
typedef struct {
    uint32_t _start_canary;     /* validation canary, must remain 0 */
    int size;                   /* buffer size in bytes, must be set to a valid size (> 0) */
    sg_buffer_type type;        /* default: SG_BUFFERTYPE_VERTEXBUFFER */
    sg_usage usage;             /* default: SG_USAGE_IMMUTABLE */
    const void* content;        /* initial content, required for SG_USAGE_IMMUTABLE (must be 0 for injected native buffers) */
    /* GL specific: optionally inject externally-created GL buffer objects */
    uint32_t gl_buffers[SG_NUM_INFLIGHT_FRAMES];
    /* Metal specific: optionally inject bridged MTLBuffer pointers */
    const void* mtl_buffers[SG_NUM_INFLIGHT_FRAMES];
    /* D3D11 specific: optionally inject an ID3D11Buffer pointer */
    const void* d3d11_buffer;
    uint32_t _end_canary;       /* validation canary, must remain 0 */
} sg_buffer_desc;
  891. /*
  892. sg_subimage_content
  893. Pointer to and size of a subimage-surface data, this is
  894. used to describe the initial content of immutable-usage images,
  895. or for updating a dynamic- or stream-usage images.
  896. For 3D- or array-textures, one sg_subimage_content item
  897. describes an entire mipmap level consisting of all array- or
  898. 3D-slices of the mipmap level. It is only possible to update
  899. an entire mipmap level, not parts of it.
  900. */
/* pointer to and size of one subimage surface; for 3D/array textures one item
   covers an entire mipmap level (all slices), partial updates are not possible */
typedef struct {
    const void* ptr; /* pointer to subimage data */
    int size; /* size in bytes of pointed-to subimage data */
} sg_subimage_content;
/* sg_image_content: image data as a 2D array of subimages,
   first index is the cubemap face, second index the mipmap level */
typedef struct {
    sg_subimage_content subimage[SG_CUBEFACE_NUM][SG_MAX_MIPMAPS];  /* [cubemap face][mip level] */
} sg_image_content;
  915. /*
  916. sg_image_desc
  917. Creation parameters for sg_image objects, used in the
  918. sg_make_image() call.
  919. The default configuration is:
  920. .type: SG_IMAGETYPE_2D
  921. .render_target: false
  922. .width 0 (must be set to >0)
  923. .height 0 (must be set to >0)
  924. .depth/.layers: 1
  925. .num_mipmaps: 1
  926. .usage: SG_USAGE_IMMUTABLE
  927. .pixel_format: SG_PIXELFORMAT_RGBA8
  928. .sample_count: 1 (only used in render_targets)
  929. .min_filter: SG_FILTER_NEAREST
  930. .mag_filter: SG_FILTER_NEAREST
  931. .wrap_u: SG_WRAP_REPEAT
  932. .wrap_v: SG_WRAP_REPEAT
  933. .wrap_w: SG_WRAP_REPEAT (only SG_IMAGETYPE_3D)
  934. .max_anisotropy 1 (must be 1..16)
  935. .min_lod 0.0f
  936. .max_lod FLT_MAX
  937. .content an sg_image_content struct to define the initial content
  938. SG_IMAGETYPE_ARRAY and SG_IMAGETYPE_3D are not supported on
  939. WebGL/GLES2, use sg_query_feature(SG_FEATURE_IMAGETYPE_ARRAY) and
  940. sg_query_feature(SG_FEATURE_IMAGETYPE_3D) at runtime to check
  941. if array- and 3D-textures are supported.
  942. Images with usage SG_USAGE_IMMUTABLE must be fully initialized by
  943. providing a valid .content member which points to
  944. initialization data.
  945. ADVANCED TOPIC: Injecting native 3D-API textures:
  946. The following struct members allow to inject your own GL, Metal
  947. or D3D11 textures into sokol_gfx:
  948. .gl_textures[SG_NUM_INFLIGHT_FRAMES]
  949. .mtl_textures[SG_NUM_INFLIGHT_FRAMES]
  950. .d3d11_texture
  951. The same rules apply as for injecting native buffers
  952. (see sg_buffer_desc documentation for more details).
  953. */
/* sg_image_desc: creation parameters for sg_make_image() (see documentation block above) */
typedef struct {
    uint32_t _start_canary;     /* validation canary, must remain 0 */
    sg_image_type type;         /* default: SG_IMAGETYPE_2D */
    bool render_target;         /* default: false, true to create a render-target image */
    int width;                  /* must be set to > 0 */
    int height;                 /* must be set to > 0 */
    union {
        int depth;              /* depth-slices for SG_IMAGETYPE_3D, default: 1 */
        int layers;             /* array layers for SG_IMAGETYPE_ARRAY, default: 1 */
    };
    int num_mipmaps;            /* default: 1 */
    sg_usage usage;             /* default: SG_USAGE_IMMUTABLE */
    sg_pixel_format pixel_format;   /* default: SG_PIXELFORMAT_RGBA8 */
    int sample_count;           /* default: 1, only used for render targets */
    sg_filter min_filter;       /* default: SG_FILTER_NEAREST */
    sg_filter mag_filter;       /* default: SG_FILTER_NEAREST */
    sg_wrap wrap_u;             /* default: SG_WRAP_REPEAT */
    sg_wrap wrap_v;             /* default: SG_WRAP_REPEAT */
    sg_wrap wrap_w;             /* default: SG_WRAP_REPEAT, only used for SG_IMAGETYPE_3D */
    uint32_t max_anisotropy;    /* default: 1, must be 1..16 */
    float min_lod;              /* default: 0.0f */
    float max_lod;              /* default: FLT_MAX */
    sg_image_content content;   /* initial content, required for SG_USAGE_IMMUTABLE */
    /* GL specific: optionally inject externally-created GL texture objects */
    uint32_t gl_textures[SG_NUM_INFLIGHT_FRAMES];
    /* Metal specific: optionally inject bridged MTLTexture pointers */
    const void* mtl_textures[SG_NUM_INFLIGHT_FRAMES];
    /* D3D11 specific: optionally inject a D3D11 texture pointer */
    const void* d3d11_texture;
    uint32_t _end_canary;       /* validation canary, must remain 0 */
} sg_image_desc;
  985. /*
  986. sg_shader_desc
  987. The structure sg_shader_desc describes the shaders, uniform blocks
  988. and texture images on the vertex- and fragment-shader stage.
  989. TODO: source code vs byte code, 3D backend API specifics.
  990. */
/* description of a single uniform inside a uniform block */
typedef struct {
    const char* name;           /* the uniform's name */
    sg_uniform_type type;       /* the uniform's data type */
    int array_count;            /* number of array elements (see _sg_uniform_size()) */
} sg_shader_uniform_desc;
/* description of a uniform block on a shader stage */
typedef struct {
    int size;                   /* byte size of the whole uniform block */
    sg_shader_uniform_desc uniforms[SG_MAX_UB_MEMBERS];
} sg_shader_uniform_block_desc;
/* description of a texture image bound to a shader stage */
typedef struct {
    const char* name;           /* the image's name in the shader */
    sg_image_type type;         /* 2D, cube, 3D or array texture */
} sg_shader_image_desc;
/* description of one shader stage (vertex or fragment) */
typedef struct {
    const char* source;         /* shader source code (alternative to byte code) */
    const uint8_t* byte_code;   /* precompiled shader byte code (alternative to source) */
    int byte_code_size;         /* size of byte code in bytes */
    const char* entry;          /* entry function name */
    sg_shader_uniform_block_desc uniform_blocks[SG_MAX_SHADERSTAGE_UBS];
    sg_shader_image_desc images[SG_MAX_SHADERSTAGE_IMAGES];
} sg_shader_stage_desc;
/* sg_shader_desc: creation parameters for sg_make_shader() */
typedef struct {
    uint32_t _start_canary;     /* validation canary, must remain 0 */
    sg_shader_stage_desc vs;    /* vertex shader stage */
    sg_shader_stage_desc fs;    /* fragment shader stage */
    uint32_t _end_canary;       /* validation canary, must remain 0 */
} sg_shader_desc;
  1018. /*
  1019. sg_pipeline_desc
  1020. The sg_pipeline_desc struct defines all creation parameters
  1021. for an sg_pipeline object, used as argument to the
  1022. sg_make_pipeline() function:
  1023. - the complete vertex layout for all input vertex buffers
  1024. - a shader object
  1025. - the 3D primitive type (points, lines, triangles, ...)
  1026. - the index type (none, 16- or 32-bit)
  1027. - depth-stencil state
  1028. - alpha-blending state
  1029. - rasterizer state
  1030. If the vertex data has no gaps between vertex components, you can omit
  1031. the .layout.buffers[].stride and layout.attrs[].offset items (leave them
  1032. default-initialized to 0), sokol will then compute the offsets and strides
  1033. from the vertex component formats (.layout.attrs[].offset). Please note
that ALL vertex attribute offsets must be 0 in order for the
  1035. automatic offset computation to kick in.
  1036. The default configuration is as follows:
  1037. .layout:
  1038. .buffers[]: vertex buffer layouts
  1039. .stride: 0 (if no stride is given it will be computed)
  1040. .step_func SG_VERTEXSTEP_PER_VERTEX
  1041. .step_rate 1
  1042. .attrs[]: vertex attribute declarations
  1043. .buffer_index 0 the vertex buffer bind slot
  1044. .offset 0 (offsets can be omitted if the vertex layout has no gaps)
  1045. .format SG_VERTEXFORMAT_INVALID (must be initialized!)
  1046. .name 0 (GLES2 requires an attribute name here)
  1047. .sem_name 0 (D3D11 requires a semantic name here)
  1048. .sem_index 0 (D3D11 requires a semantic index here)
.shader: 0 (must be initialized with a valid sg_shader id!)
  1050. .primitive_type: SG_PRIMITIVETYPE_TRIANGLES
  1051. .index_type: SG_INDEXTYPE_NONE
  1052. .depth_stencil:
  1053. .stencil_front, .stencil_back:
  1054. .fail_op: SG_STENCILOP_KEEP
  1055. .depth_fail_op: SG_STENCILOP_KEEP
  1056. .pass_op: SG_STENCILOP_KEEP
  1057. .compare_func SG_COMPAREFUNC_ALWAYS
  1058. .depth_compare_func: SG_COMPAREFUNC_ALWAYS
  1059. .depth_write_enabled: false
  1060. .stencil_enabled: false
  1061. .stencil_read_mask: 0
  1062. .stencil_write_mask: 0
  1063. .stencil_ref: 0
  1064. .blend:
  1065. .enabled: false
  1066. .src_factor_rgb: SG_BLENDFACTOR_ONE
  1067. .dst_factor_rgb: SG_BLENDFACTOR_ZERO
  1068. .op_rgb: SG_BLENDOP_ADD
  1069. .src_factor_alpha: SG_BLENDFACTOR_ONE
  1070. .dst_factor_alpha: SG_BLENDFACTOR_ZERO
  1071. .op_alpha: SG_BLENDOP_ADD
  1072. .color_write_mask: SG_COLORMASK_RGBA
  1073. .color_attachment_count 1
  1074. .color_format SG_PIXELFORMAT_RGBA8
  1075. .depth_format SG_PIXELFORMAT_DEPTHSTENCIL
  1076. .blend_color: { 0.0f, 0.0f, 0.0f, 0.0f }
  1077. .rasterizer:
  1078. .alpha_to_coverage_enabled: false
  1079. .cull_mode: SG_CULLMODE_NONE
  1080. .face_winding: SG_FACEWINDING_CW
  1081. .sample_count: 1
  1082. .depth_bias: 0.0f
  1083. .depth_bias_slope_scale: 0.0f
  1084. .depth_bias_clamp: 0.0f
  1085. */
/* layout of a single vertex buffer bind slot */
typedef struct {
    int stride;                 /* vertex stride in bytes, 0 means: computed from vertex formats */
    sg_vertex_step step_func;   /* default: SG_VERTEXSTEP_PER_VERTEX */
    int step_rate;              /* default: 1 */
} sg_buffer_layout_desc;
/* declaration of a single vertex attribute */
typedef struct {
    const char* name;           /* attribute name (required by GLES2) */
    const char* sem_name;       /* semantic name (required by D3D11) */
    int sem_index;              /* semantic index (required by D3D11) */
    int buffer_index;           /* vertex buffer bind slot, default: 0 */
    int offset;                 /* byte offset in vertex, 0 means: computed (only if layout has no gaps) */
    sg_vertex_format format;    /* default: SG_VERTEXFORMAT_INVALID (must be initialized!) */
} sg_vertex_attr_desc;
/* complete vertex layout for all input vertex buffers */
typedef struct {
    sg_buffer_layout_desc buffers[SG_MAX_SHADERSTAGE_BUFFERS];
    sg_vertex_attr_desc attrs[SG_MAX_VERTEX_ATTRIBUTES];
} sg_layout_desc;
/* stencil operations for one face (front or back) */
typedef struct {
    sg_stencil_op fail_op;          /* default: SG_STENCILOP_KEEP */
    sg_stencil_op depth_fail_op;    /* default: SG_STENCILOP_KEEP */
    sg_stencil_op pass_op;          /* default: SG_STENCILOP_KEEP */
    sg_compare_func compare_func;   /* default: SG_COMPAREFUNC_ALWAYS */
} sg_stencil_state;
/* depth-stencil pipeline state */
typedef struct {
    sg_stencil_state stencil_front;
    sg_stencil_state stencil_back;
    sg_compare_func depth_compare_func; /* default: SG_COMPAREFUNC_ALWAYS */
    bool depth_write_enabled;           /* default: false */
    bool stencil_enabled;               /* default: false */
    uint8_t stencil_read_mask;          /* default: 0 */
    uint8_t stencil_write_mask;         /* default: 0 */
    uint8_t stencil_ref;                /* default: 0 */
} sg_depth_stencil_state;
/* alpha-blending pipeline state */
typedef struct {
    bool enabled;                       /* default: false */
    sg_blend_factor src_factor_rgb;     /* default: SG_BLENDFACTOR_ONE */
    sg_blend_factor dst_factor_rgb;     /* default: SG_BLENDFACTOR_ZERO */
    sg_blend_op op_rgb;                 /* default: SG_BLENDOP_ADD */
    sg_blend_factor src_factor_alpha;   /* default: SG_BLENDFACTOR_ONE */
    sg_blend_factor dst_factor_alpha;   /* default: SG_BLENDFACTOR_ZERO */
    sg_blend_op op_alpha;               /* default: SG_BLENDOP_ADD */
    uint8_t color_write_mask;           /* default: SG_COLORMASK_RGBA */
    int color_attachment_count;         /* default: 1 */
    sg_pixel_format color_format;       /* default: SG_PIXELFORMAT_RGBA8 */
    sg_pixel_format depth_format;       /* default: SG_PIXELFORMAT_DEPTHSTENCIL */
    float blend_color[4];               /* default: { 0.0f, 0.0f, 0.0f, 0.0f } */
} sg_blend_state;
/* rasterizer pipeline state */
typedef struct {
    bool alpha_to_coverage_enabled;     /* default: false */
    sg_cull_mode cull_mode;             /* default: SG_CULLMODE_NONE */
    sg_face_winding face_winding;       /* default: SG_FACEWINDING_CW */
    int sample_count;                   /* default: 1 */
    float depth_bias;                   /* default: 0.0f */
    float depth_bias_slope_scale;       /* default: 0.0f */
    float depth_bias_clamp;             /* default: 0.0f */
} sg_rasterizer_state;
/* sg_pipeline_desc: creation parameters for sg_make_pipeline() (see documentation block above) */
typedef struct {
    uint32_t _start_canary;         /* validation canary, must remain 0 */
    sg_layout_desc layout;          /* vertex layout for all input vertex buffers */
    sg_shader shader;               /* must be initialized with a valid sg_shader id! */
    sg_primitive_type primitive_type;   /* default: SG_PRIMITIVETYPE_TRIANGLES */
    sg_index_type index_type;       /* default: SG_INDEXTYPE_NONE */
    sg_depth_stencil_state depth_stencil;
    sg_blend_state blend;
    sg_rasterizer_state rasterizer;
    uint32_t _end_canary;           /* validation canary, must remain 0 */
} sg_pipeline_desc;
  1153. /*
  1154. sg_pass_desc
  1155. Creation parameters for an sg_pass object, used as argument
  1156. to the sg_make_pass() function.
  1157. A pass object contains 1..4 color-attachments and none, or one,
  1158. depth-stencil-attachment. Each attachment consists of
  1159. an image, and two additional indices describing
  1160. which subimage the pass will render: one mipmap index, and
  1161. if the image is a cubemap, array-texture or 3D-texture, the
  1162. face-index, array-layer or depth-slice.
  1163. Pass images must fulfill the following requirements:
  1164. All images must have:
  1165. - been created as render target (sg_image_desc.render_target = true)
  1166. - the same size
  1167. - the same sample count
  1168. In addition, all color-attachment images must have the same
  1169. pixel format.
  1170. */
/* a single pass attachment: image plus the subimage to render into */
typedef struct {
    sg_image image;     /* the image (must be created with render_target = true) */
    int mip_level;      /* the mipmap level to render into */
    union {
        int face;       /* cubemap face */
        int layer;      /* array-texture layer */
        int slice;      /* 3D-texture depth slice */
    };
} sg_attachment_desc;
/* sg_pass_desc: creation parameters for sg_make_pass() (see documentation block above) */
typedef struct {
    uint32_t _start_canary;     /* validation canary, must remain 0 */
    sg_attachment_desc color_attachments[SG_MAX_COLOR_ATTACHMENTS];     /* 1..4 color attachments */
    sg_attachment_desc depth_stencil_attachment;                        /* optional depth-stencil attachment */
    uint32_t _end_canary;       /* validation canary, must remain 0 */
} sg_pass_desc;
  1186. /* setup and misc functions */
  1187. extern void sg_setup(const sg_desc* desc);
  1188. extern void sg_shutdown();
  1189. extern bool sg_isvalid();
  1190. extern bool sg_query_feature(sg_feature feature);
  1191. extern void sg_reset_state_cache();
/* resource creation, destruction and updating */
/* make-functions return a resource id handle; desc structs can be partially
   initialized, zeroed members are replaced with defaults */
extern sg_buffer sg_make_buffer(const sg_buffer_desc* desc);
extern sg_image sg_make_image(const sg_image_desc* desc);
extern sg_shader sg_make_shader(const sg_shader_desc* desc);
extern sg_pipeline sg_make_pipeline(const sg_pipeline_desc* desc);
extern sg_pass sg_make_pass(const sg_pass_desc* desc);
extern void sg_destroy_buffer(sg_buffer buf);
extern void sg_destroy_image(sg_image img);
extern void sg_destroy_shader(sg_shader shd);
extern void sg_destroy_pipeline(sg_pipeline pip);
extern void sg_destroy_pass(sg_pass pass);
/* update the content of a dynamic/stream-usage buffer or image */
extern void sg_update_buffer(sg_buffer buf, const void* data_ptr, int data_size);
extern void sg_update_image(sg_image img, const sg_image_content* data);
/* get resource state (initial, alloc, valid, failed) */
extern sg_resource_state sg_query_buffer_state(sg_buffer buf);
extern sg_resource_state sg_query_image_state(sg_image img);
extern sg_resource_state sg_query_shader_state(sg_shader shd);
extern sg_resource_state sg_query_pipeline_state(sg_pipeline pip);
extern sg_resource_state sg_query_pass_state(sg_pass pass);
  1211. /* rendering functions */
  1212. extern void sg_begin_default_pass(const sg_pass_action* pass_action, int width, int height);
  1213. extern void sg_begin_pass(sg_pass pass, const sg_pass_action* pass_action);
  1214. extern void sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left);
  1215. extern void sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left);
  1216. extern void sg_apply_draw_state(const sg_draw_state* ds);
  1217. extern void sg_apply_uniform_block(sg_shader_stage stage, int ub_index, const void* data, int num_bytes);
  1218. extern void sg_draw(int base_element, int num_elements, int num_instances);
  1219. extern void sg_end_pass();
  1220. extern void sg_commit();
  1221. /* separate resource allocation and initialization (for async setup) */
  1222. extern sg_buffer sg_alloc_buffer();
  1223. extern sg_image sg_alloc_image();
  1224. extern sg_shader sg_alloc_shader();
  1225. extern sg_pipeline sg_alloc_pipeline();
  1226. extern sg_pass sg_alloc_pass();
  1227. extern void sg_init_buffer(sg_buffer buf_id, const sg_buffer_desc* desc);
  1228. extern void sg_init_image(sg_image img_id, const sg_image_desc* desc);
  1229. extern void sg_init_shader(sg_shader shd_id, const sg_shader_desc* desc);
  1230. extern void sg_init_pipeline(sg_pipeline pip_id, const sg_pipeline_desc* desc);
  1231. extern void sg_init_pass(sg_pass pass_id, const sg_pass_desc* desc);
  1232. extern void sg_fail_buffer(sg_buffer buf_id);
  1233. extern void sg_fail_image(sg_image img_id);
  1234. extern void sg_fail_shader(sg_shader shd_id);
  1235. extern void sg_fail_pipeline(sg_pipeline pip_id);
  1236. extern void sg_fail_pass(sg_pass pass_id);
  1237. #ifdef __cplusplus
  1238. } /* extern "C" */
  1239. #endif
  1240. /*--- IMPLEMENTATION ---------------------------------------------------------*/
#ifdef SOKOL_IMPL
/* all configuration macros below can be overridden by defining them
   before including the implementation */
#ifndef SOKOL_DEBUG
#ifdef _DEBUG
#define SOKOL_DEBUG (1)
#endif
#endif
#ifndef SOKOL_ASSERT
#include <assert.h>
#define SOKOL_ASSERT(c) assert(c)
#endif
#ifndef SOKOL_VALIDATE_BEGIN
#define SOKOL_VALIDATE_BEGIN() _sg_validate_begin()
#endif
#ifndef SOKOL_VALIDATE
#define SOKOL_VALIDATE(cond, err) _sg_validate(cond, err)
#endif
#ifndef SOKOL_VALIDATE_END
#define SOKOL_VALIDATE_END() _sg_validate_end()
#endif
#ifndef SOKOL_UNREACHABLE
#define SOKOL_UNREACHABLE SOKOL_ASSERT(false)
#endif
#ifndef SOKOL_MALLOC
#include <stdlib.h>
#define SOKOL_MALLOC(s) malloc(s)
#define SOKOL_FREE(p) free(p)
#endif
#ifndef SOKOL_LOG
#ifdef SOKOL_DEBUG
#include <stdio.h>
#define SOKOL_LOG(s) { SOKOL_ASSERT(s); puts(s); }
#else
#define SOKOL_LOG(s)
#endif
#endif
/* exactly one rendering backend must be selected */
#if !(defined(SOKOL_GLCORE33)||defined(SOKOL_GLES2)||defined(SOKOL_GLES3)||defined(SOKOL_D3D11)||defined(SOKOL_METAL_MACOS)||defined(SOKOL_METAL_IOS))
#error "Please select a backend with SOKOL_GLCORE33, SOKOL_GLES2, SOKOL_GLES3, SOKOL_D3D11, SOKOL_METAL_MACOS or SOKOL_METAL_IOS"
#endif
/* 'unused' attribute suppresses warnings for internal helpers not used by every backend */
#ifndef _SOKOL_PRIVATE
#if defined(__GNUC__)
#define _SOKOL_PRIVATE __attribute__((unused)) static
#else
#define _SOKOL_PRIVATE static
#endif
#endif
/* default clear values (override by defining before including the implementation) */
#ifndef SG_DEFAULT_CLEAR_RED
#define SG_DEFAULT_CLEAR_RED (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_GREEN
#define SG_DEFAULT_CLEAR_GREEN (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_BLUE
#define SG_DEFAULT_CLEAR_BLUE (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_ALPHA
#define SG_DEFAULT_CLEAR_ALPHA (1.0f)
#endif
#ifndef SG_DEFAULT_CLEAR_DEPTH
#define SG_DEFAULT_CLEAR_DEPTH (1.0f)
#endif
#ifndef SG_DEFAULT_CLEAR_STENCIL
#define SG_DEFAULT_CLEAR_STENCIL (0)
#endif
  1305. #ifdef __cplusplus
  1306. extern "C" {
  1307. #endif
/* internal constants: resource-id layout and default pool sizes */
enum {
    _SG_SLOT_SHIFT = 16,                        /* lower 16 bits of a resource id hold the pool-slot index */
    _SG_SLOT_MASK = (1<<_SG_SLOT_SHIFT)-1,      /* mask to extract the slot index from a resource id */
    _SG_MAX_POOL_SIZE = (1<<_SG_SLOT_SHIFT),    /* upper limit for any resource pool size */
    _SG_DEFAULT_BUFFER_POOL_SIZE = 128,
    _SG_DEFAULT_IMAGE_POOL_SIZE = 128,
    _SG_DEFAULT_SHADER_POOL_SIZE = 32,
    _SG_DEFAULT_PIPELINE_POOL_SIZE = 64,
    _SG_DEFAULT_PASS_POOL_SIZE = 16,
};
  1318. /* helper macros */
  1319. #define _sg_def(val, def) (((val) == 0) ? (def) : (val))
  1320. #define _sg_def_flt(val, def) (((val) == 0.0f) ? (def) : (val))
  1321. #define _sg_min(a,b) ((a<b)?a:b)
  1322. #define _sg_max(a,b) ((a>b)?a:b)
  1323. #define _sg_clamp(v,v0,v1) ((v<v0)?(v0):((v>v1)?(v1):(v)))
  1324. #define _sg_fequal(val,cmp,delta) (((val-cmp)> -delta)&&((val-cmp)<delta))
  1325. /*-- helper functions --------------------------------------------------------*/
  1326. /* return byte size of a vertex format */
  1327. _SOKOL_PRIVATE int _sg_vertexformat_bytesize(sg_vertex_format fmt) {
  1328. switch (fmt) {
  1329. case SG_VERTEXFORMAT_FLOAT: return 4;
  1330. case SG_VERTEXFORMAT_FLOAT2: return 8;
  1331. case SG_VERTEXFORMAT_FLOAT3: return 12;
  1332. case SG_VERTEXFORMAT_FLOAT4: return 16;
  1333. case SG_VERTEXFORMAT_BYTE4: return 4;
  1334. case SG_VERTEXFORMAT_BYTE4N: return 4;
  1335. case SG_VERTEXFORMAT_UBYTE4: return 4;
  1336. case SG_VERTEXFORMAT_UBYTE4N: return 4;
  1337. case SG_VERTEXFORMAT_SHORT2: return 4;
  1338. case SG_VERTEXFORMAT_SHORT2N: return 4;
  1339. case SG_VERTEXFORMAT_SHORT4: return 8;
  1340. case SG_VERTEXFORMAT_SHORT4N: return 8;
  1341. case SG_VERTEXFORMAT_UINT10_N2: return 4;
  1342. case SG_VERTEXFORMAT_INVALID: return 0;
  1343. default:
  1344. SOKOL_UNREACHABLE;
  1345. return -1;
  1346. }
  1347. }
  1348. /* return the byte size of a shader uniform */
  1349. _SOKOL_PRIVATE int _sg_uniform_size(sg_uniform_type type, int count) {
  1350. switch (type) {
  1351. case SG_UNIFORMTYPE_INVALID: return 0;
  1352. case SG_UNIFORMTYPE_FLOAT: return 4 * count;
  1353. case SG_UNIFORMTYPE_FLOAT2: return 8 * count;
  1354. case SG_UNIFORMTYPE_FLOAT3: return 12 * count; /* FIXME: std140??? */
  1355. case SG_UNIFORMTYPE_FLOAT4: return 16 * count;
  1356. case SG_UNIFORMTYPE_MAT4: return 64 * count;
  1357. default:
  1358. SOKOL_UNREACHABLE;
  1359. return -1;
  1360. }
  1361. }
  1362. /* return true if pixel format is a compressed format */
/* return true if pixel format is a (block-)compressed format (DXT, PVRTC, ETC2) */
_SOKOL_PRIVATE bool _sg_is_compressed_pixel_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_DXT1:
        case SG_PIXELFORMAT_DXT3:
        case SG_PIXELFORMAT_DXT5:
        case SG_PIXELFORMAT_PVRTC2_RGB:
        case SG_PIXELFORMAT_PVRTC4_RGB:
        case SG_PIXELFORMAT_PVRTC2_RGBA:
        case SG_PIXELFORMAT_PVRTC4_RGBA:
        case SG_PIXELFORMAT_ETC2_RGB8:
        case SG_PIXELFORMAT_ETC2_SRGB8:
            return true;
        default:
            return false;
    }
}
  1379. /* return true if pixel format is a valid render target format */
/* return true if pixel format is valid for a render-target color attachment */
_SOKOL_PRIVATE bool _sg_is_valid_rendertarget_color_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_RGBA8:
        case SG_PIXELFORMAT_R10G10B10A2:
        case SG_PIXELFORMAT_RGBA32F:
        case SG_PIXELFORMAT_RGBA16F:
            return true;
        default:
            return false;
    }
}
  1391. /* return true if pixel format is a valid depth format */
/* return true if pixel format is valid for a render-target depth attachment */
_SOKOL_PRIVATE bool _sg_is_valid_rendertarget_depth_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_DEPTH:
        case SG_PIXELFORMAT_DEPTHSTENCIL:
            return true;
        default:
            return false;
    }
}
  1401. /* return true if pixel format is a depth-stencil format */
/* return true if pixel format is a combined depth-stencil format */
_SOKOL_PRIVATE bool _sg_is_depth_stencil_format(sg_pixel_format fmt) {
    /* FIXME: more depth stencil formats? */
    return (SG_PIXELFORMAT_DEPTHSTENCIL == fmt);
}
  1406. /* return the bytes-per-pixel for a pixel format */
/* return the bytes-per-pixel for an uncompressed pixel format
   (compressed formats hit SOKOL_UNREACHABLE; their pitch is handled in _sg_row_pitch) */
_SOKOL_PRIVATE int _sg_pixelformat_bytesize(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_RGBA32F:
            return 16;
        case SG_PIXELFORMAT_RGBA16F:
            return 8;
        case SG_PIXELFORMAT_RGBA8:
        case SG_PIXELFORMAT_R10G10B10A2:
        case SG_PIXELFORMAT_R32F:
            return 4;
        case SG_PIXELFORMAT_RGB8:
            return 3;
        case SG_PIXELFORMAT_R5G5B5A1:
        case SG_PIXELFORMAT_R5G6B5:
        case SG_PIXELFORMAT_RGBA4:
        case SG_PIXELFORMAT_R16F:
            return 2;
        case SG_PIXELFORMAT_L8:
            return 1;
        default:
            SOKOL_UNREACHABLE;
            return 0;
    }
}
  1431. /* return row pitch for an image */
  1432. _SOKOL_PRIVATE int _sg_row_pitch(sg_pixel_format fmt, int width) {
  1433. int pitch;
  1434. switch (fmt) {
  1435. case SG_PIXELFORMAT_DXT1:
  1436. case SG_PIXELFORMAT_ETC2_RGB8:
  1437. case SG_PIXELFORMAT_ETC2_SRGB8:
  1438. pitch = ((width + 3) / 4) * 8;
  1439. pitch = pitch < 8 ? 8 : pitch;
  1440. break;
  1441. case SG_PIXELFORMAT_DXT3:
  1442. case SG_PIXELFORMAT_DXT5:
  1443. pitch = ((width + 3) / 4) * 16;
  1444. pitch = pitch < 16 ? 16 : pitch;
  1445. break;
  1446. case SG_PIXELFORMAT_PVRTC4_RGB:
  1447. case SG_PIXELFORMAT_PVRTC4_RGBA:
  1448. {
  1449. const int block_size = 4*4;
  1450. const int bpp = 4;
  1451. int width_blocks = width / 4;
  1452. width_blocks = width_blocks < 2 ? 2 : width_blocks;
  1453. pitch = width_blocks * ((block_size * bpp) / 8);
  1454. }
  1455. break;
  1456. case SG_PIXELFORMAT_PVRTC2_RGB:
  1457. case SG_PIXELFORMAT_PVRTC2_RGBA:
  1458. {
  1459. const int block_size = 8*4;
  1460. const int bpp = 2;
  1461. int width_blocks = width / 4;
  1462. width_blocks = width_blocks < 2 ? 2 : width_blocks;
  1463. pitch = width_blocks * ((block_size * bpp) / 8);
  1464. }
  1465. break;
  1466. default:
  1467. pitch = width * _sg_pixelformat_bytesize(fmt);
  1468. break;
  1469. }
  1470. return pitch;
  1471. }
  1472. /* return pitch of a 2D subimage / texture slice */
  1473. _SOKOL_PRIVATE int _sg_surface_pitch(sg_pixel_format fmt, int width, int height) {
  1474. int num_rows = 0;
  1475. switch (fmt) {
  1476. case SG_PIXELFORMAT_DXT1:
  1477. case SG_PIXELFORMAT_DXT3:
  1478. case SG_PIXELFORMAT_DXT5:
  1479. case SG_PIXELFORMAT_ETC2_RGB8:
  1480. case SG_PIXELFORMAT_ETC2_SRGB8:
  1481. case SG_PIXELFORMAT_PVRTC2_RGB:
  1482. case SG_PIXELFORMAT_PVRTC2_RGBA:
  1483. case SG_PIXELFORMAT_PVRTC4_RGB:
  1484. case SG_PIXELFORMAT_PVRTC4_RGBA:
  1485. num_rows = ((height + 3) / 4);
  1486. break;
  1487. default:
  1488. num_rows = height;
  1489. break;
  1490. }
  1491. if (num_rows < 1) {
  1492. num_rows = 1;
  1493. }
  1494. return num_rows * _sg_row_pitch(fmt, width);
  1495. }
  1496. /* resolve pass action defaults into a new pass action struct */
  1497. _SOKOL_PRIVATE void _sg_resolve_default_pass_action(const sg_pass_action* from, sg_pass_action* to) {
  1498. SOKOL_ASSERT(from && to);
  1499. *to = *from;
  1500. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  1501. if (to->colors[i].action == _SG_ACTION_DEFAULT) {
  1502. to->colors[i].action = SG_ACTION_CLEAR;
  1503. to->colors[i].val[0] = SG_DEFAULT_CLEAR_RED;
  1504. to->colors[i].val[1] = SG_DEFAULT_CLEAR_GREEN;
  1505. to->colors[i].val[2] = SG_DEFAULT_CLEAR_BLUE;
  1506. to->colors[i].val[3] = SG_DEFAULT_CLEAR_ALPHA;
  1507. }
  1508. }
  1509. if (to->depth.action == _SG_ACTION_DEFAULT) {
  1510. to->depth.action = SG_ACTION_CLEAR;
  1511. to->depth.val = SG_DEFAULT_CLEAR_DEPTH;
  1512. }
  1513. if (to->stencil.action == _SG_ACTION_DEFAULT) {
  1514. to->stencil.action = SG_ACTION_CLEAR;
  1515. to->stencil.val = SG_DEFAULT_CLEAR_STENCIL;
  1516. }
  1517. }
  1518. /*-- resource pool slots (must be defined before rendering backend) ----------*/
  1519. typedef struct {
  1520. uint32_t id;
  1521. sg_resource_state state;
  1522. } _sg_slot;
  1523. _SOKOL_PRIVATE int _sg_slot_index(uint32_t id) {
  1524. return id & _SG_SLOT_MASK;
  1525. }
  1526. #ifdef __cplusplus
  1527. } /* extern "C" */
  1528. #endif
  1529. /*== GL BACKEND ==============================================================*/
  1530. #if defined(SOKOL_GLCORE33) || defined(SOKOL_GLES2) || defined(SOKOL_GLES3)
  1531. /* strstr(), memset() */
  1532. #include <string.h>
  1533. #ifdef __cplusplus
  1534. extern "C" {
  1535. #endif
/* fallback definitions for GL constants that may be missing from older
   or incomplete GL headers (values from the official GL extension registry) */
#ifndef GL_UNSIGNED_INT_2_10_10_10_REV
#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368
#endif
#ifndef GL_UNSIGNED_INT_24_8
#define GL_UNSIGNED_INT_24_8 0x84FA
#endif
#ifndef GL_TEXTURE_MAX_ANISOTROPY_EXT
#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE
#endif
#ifndef GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT
#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT1_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT3_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT5_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3
#endif
#ifndef GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG
#define GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG 0x8C01
#endif
#ifndef GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG
#define GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG 0x8C00
#endif
#ifndef GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG
#define GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG 0x8C03
#endif
#ifndef GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG
#define GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG 0x8C02
#endif
#ifndef GL_COMPRESSED_RGB8_ETC2
#define GL_COMPRESSED_RGB8_ETC2 0x9274
#endif
#ifndef GL_COMPRESSED_SRGB8_ETC2
#define GL_COMPRESSED_SRGB8_ETC2 0x9275
#endif
#ifndef GL_DEPTH24_STENCIL8
#define GL_DEPTH24_STENCIL8 0x88F0
#endif
#ifndef GL_HALF_FLOAT
#define GL_HALF_FLOAT 0x140B
#endif
#ifndef GL_DEPTH_STENCIL
#define GL_DEPTH_STENCIL 0x84F9
#endif
/* on GLES2, map the instancing functions to their EXT-suffixed variants */
#ifdef SOKOL_GLES2
#define glVertexAttribDivisor(index, divisor) glVertexAttribDivisorEXT(index, divisor)
#define glDrawArraysInstanced(mode, first, count, instancecount) glDrawArraysInstancedEXT(mode, first, count, instancecount)
#define glDrawElementsInstanced(mode, count, type, indices, instancecount) glDrawElementsInstancedEXT(mode, count, type, indices, instancecount)
#endif
  1589. #define _SG_GL_CHECK_ERROR() { SOKOL_ASSERT(glGetError() == GL_NO_ERROR); }
/* true if running in GLES2-fallback mode */
  1591. static bool _sg_gl_gles2;
  1592. /*-- type translation --------------------------------------------------------*/
  1593. _SOKOL_PRIVATE GLenum _sg_gl_buffer_target(sg_buffer_type t) {
  1594. switch (t) {
  1595. case SG_BUFFERTYPE_VERTEXBUFFER: return GL_ARRAY_BUFFER;
  1596. case SG_BUFFERTYPE_INDEXBUFFER: return GL_ELEMENT_ARRAY_BUFFER;
  1597. default: SOKOL_UNREACHABLE; return 0;
  1598. }
  1599. }
  1600. _SOKOL_PRIVATE GLenum _sg_gl_texture_target(sg_image_type t) {
  1601. switch (t) {
  1602. case SG_IMAGETYPE_2D: return GL_TEXTURE_2D;
  1603. case SG_IMAGETYPE_CUBE: return GL_TEXTURE_CUBE_MAP;
  1604. #if !defined(SOKOL_GLES2)
  1605. case SG_IMAGETYPE_3D: return GL_TEXTURE_3D;
  1606. case SG_IMAGETYPE_ARRAY: return GL_TEXTURE_2D_ARRAY;
  1607. #endif
  1608. default: SOKOL_UNREACHABLE; return 0;
  1609. }
  1610. }
  1611. _SOKOL_PRIVATE GLenum _sg_gl_usage(sg_usage u) {
  1612. switch (u) {
  1613. case SG_USAGE_IMMUTABLE: return GL_STATIC_DRAW;
  1614. case SG_USAGE_DYNAMIC: return GL_DYNAMIC_DRAW;
  1615. case SG_USAGE_STREAM: return GL_STREAM_DRAW;
  1616. default: SOKOL_UNREACHABLE; return 0;
  1617. }
  1618. }
  1619. _SOKOL_PRIVATE GLenum _sg_gl_shader_stage(sg_shader_stage stage) {
  1620. switch (stage) {
  1621. case SG_SHADERSTAGE_VS: return GL_VERTEX_SHADER;
  1622. case SG_SHADERSTAGE_FS: return GL_FRAGMENT_SHADER;
  1623. default: SOKOL_UNREACHABLE; return 0;
  1624. }
  1625. }
  1626. _SOKOL_PRIVATE GLint _sg_gl_vertexformat_size(sg_vertex_format fmt) {
  1627. switch (fmt) {
  1628. case SG_VERTEXFORMAT_FLOAT: return 1;
  1629. case SG_VERTEXFORMAT_FLOAT2: return 2;
  1630. case SG_VERTEXFORMAT_FLOAT3: return 3;
  1631. case SG_VERTEXFORMAT_FLOAT4: return 4;
  1632. case SG_VERTEXFORMAT_BYTE4: return 4;
  1633. case SG_VERTEXFORMAT_BYTE4N: return 4;
  1634. case SG_VERTEXFORMAT_UBYTE4: return 4;
  1635. case SG_VERTEXFORMAT_UBYTE4N: return 4;
  1636. case SG_VERTEXFORMAT_SHORT2: return 2;
  1637. case SG_VERTEXFORMAT_SHORT2N: return 2;
  1638. case SG_VERTEXFORMAT_SHORT4: return 4;
  1639. case SG_VERTEXFORMAT_SHORT4N: return 4;
  1640. case SG_VERTEXFORMAT_UINT10_N2: return 4;
  1641. default: SOKOL_UNREACHABLE; return 0;
  1642. }
  1643. }
  1644. _SOKOL_PRIVATE GLenum _sg_gl_vertexformat_type(sg_vertex_format fmt) {
  1645. switch (fmt) {
  1646. case SG_VERTEXFORMAT_FLOAT:
  1647. case SG_VERTEXFORMAT_FLOAT2:
  1648. case SG_VERTEXFORMAT_FLOAT3:
  1649. case SG_VERTEXFORMAT_FLOAT4:
  1650. return GL_FLOAT;
  1651. case SG_VERTEXFORMAT_BYTE4:
  1652. case SG_VERTEXFORMAT_BYTE4N:
  1653. return GL_BYTE;
  1654. case SG_VERTEXFORMAT_UBYTE4:
  1655. case SG_VERTEXFORMAT_UBYTE4N:
  1656. return GL_UNSIGNED_BYTE;
  1657. case SG_VERTEXFORMAT_SHORT2:
  1658. case SG_VERTEXFORMAT_SHORT2N:
  1659. case SG_VERTEXFORMAT_SHORT4:
  1660. case SG_VERTEXFORMAT_SHORT4N:
  1661. return GL_SHORT;
  1662. case SG_VERTEXFORMAT_UINT10_N2:
  1663. return GL_UNSIGNED_INT_2_10_10_10_REV;
  1664. default:
  1665. SOKOL_UNREACHABLE; return 0;
  1666. }
  1667. }
  1668. _SOKOL_PRIVATE GLboolean _sg_gl_vertexformat_normalized(sg_vertex_format fmt) {
  1669. switch (fmt) {
  1670. case SG_VERTEXFORMAT_BYTE4N:
  1671. case SG_VERTEXFORMAT_UBYTE4N:
  1672. case SG_VERTEXFORMAT_SHORT2N:
  1673. case SG_VERTEXFORMAT_SHORT4N:
  1674. case SG_VERTEXFORMAT_UINT10_N2:
  1675. return GL_TRUE;
  1676. default:
  1677. return GL_FALSE;
  1678. }
  1679. }
  1680. _SOKOL_PRIVATE GLenum _sg_gl_primitive_type(sg_primitive_type t) {
  1681. switch (t) {
  1682. case SG_PRIMITIVETYPE_POINTS: return GL_POINTS;
  1683. case SG_PRIMITIVETYPE_LINES: return GL_LINES;
  1684. case SG_PRIMITIVETYPE_LINE_STRIP: return GL_LINE_STRIP;
  1685. case SG_PRIMITIVETYPE_TRIANGLES: return GL_TRIANGLES;
  1686. case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return GL_TRIANGLE_STRIP;
  1687. default: SOKOL_UNREACHABLE; return 0;
  1688. }
  1689. }
  1690. _SOKOL_PRIVATE GLenum _sg_gl_index_type(sg_index_type t) {
  1691. switch (t) {
  1692. case SG_INDEXTYPE_NONE: return 0;
  1693. case SG_INDEXTYPE_UINT16: return GL_UNSIGNED_SHORT;
  1694. case SG_INDEXTYPE_UINT32: return GL_UNSIGNED_INT;
  1695. default: SOKOL_UNREACHABLE; return 0;
  1696. }
  1697. }
  1698. _SOKOL_PRIVATE GLenum _sg_gl_compare_func(sg_compare_func cmp) {
  1699. switch (cmp) {
  1700. case SG_COMPAREFUNC_NEVER: return GL_NEVER;
  1701. case SG_COMPAREFUNC_LESS: return GL_LESS;
  1702. case SG_COMPAREFUNC_EQUAL: return GL_EQUAL;
  1703. case SG_COMPAREFUNC_LESS_EQUAL: return GL_LEQUAL;
  1704. case SG_COMPAREFUNC_GREATER: return GL_GREATER;
  1705. case SG_COMPAREFUNC_NOT_EQUAL: return GL_NOTEQUAL;
  1706. case SG_COMPAREFUNC_GREATER_EQUAL: return GL_GEQUAL;
  1707. case SG_COMPAREFUNC_ALWAYS: return GL_ALWAYS;
  1708. default: SOKOL_UNREACHABLE; return 0;
  1709. }
  1710. }
  1711. _SOKOL_PRIVATE GLenum _sg_gl_stencil_op(sg_stencil_op op) {
  1712. switch (op) {
  1713. case SG_STENCILOP_KEEP: return GL_KEEP;
  1714. case SG_STENCILOP_ZERO: return GL_ZERO;
  1715. case SG_STENCILOP_REPLACE: return GL_REPLACE;
  1716. case SG_STENCILOP_INCR_CLAMP: return GL_INCR;
  1717. case SG_STENCILOP_DECR_CLAMP: return GL_DECR;
  1718. case SG_STENCILOP_INVERT: return GL_INVERT;
  1719. case SG_STENCILOP_INCR_WRAP: return GL_INCR_WRAP;
  1720. case SG_STENCILOP_DECR_WRAP: return GL_DECR_WRAP;
  1721. default: SOKOL_UNREACHABLE; return 0;
  1722. }
  1723. }
  1724. _SOKOL_PRIVATE GLenum _sg_gl_blend_factor(sg_blend_factor f) {
  1725. switch (f) {
  1726. case SG_BLENDFACTOR_ZERO: return GL_ZERO;
  1727. case SG_BLENDFACTOR_ONE: return GL_ONE;
  1728. case SG_BLENDFACTOR_SRC_COLOR: return GL_SRC_COLOR;
  1729. case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return GL_ONE_MINUS_SRC_COLOR;
  1730. case SG_BLENDFACTOR_SRC_ALPHA: return GL_SRC_ALPHA;
  1731. case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return GL_ONE_MINUS_SRC_ALPHA;
  1732. case SG_BLENDFACTOR_DST_COLOR: return GL_DST_COLOR;
  1733. case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return GL_ONE_MINUS_DST_COLOR;
  1734. case SG_BLENDFACTOR_DST_ALPHA: return GL_DST_ALPHA;
  1735. case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return GL_ONE_MINUS_DST_ALPHA;
  1736. case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return GL_SRC_ALPHA_SATURATE;
  1737. case SG_BLENDFACTOR_BLEND_COLOR: return GL_CONSTANT_COLOR;
  1738. case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return GL_ONE_MINUS_CONSTANT_COLOR;
  1739. case SG_BLENDFACTOR_BLEND_ALPHA: return GL_CONSTANT_ALPHA;
  1740. case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return GL_ONE_MINUS_CONSTANT_ALPHA;
  1741. default: SOKOL_UNREACHABLE; return 0;
  1742. }
  1743. }
  1744. _SOKOL_PRIVATE GLenum _sg_gl_blend_op(sg_blend_op op) {
  1745. switch (op) {
  1746. case SG_BLENDOP_ADD: return GL_FUNC_ADD;
  1747. case SG_BLENDOP_SUBTRACT: return GL_FUNC_SUBTRACT;
  1748. case SG_BLENDOP_REVERSE_SUBTRACT: return GL_FUNC_REVERSE_SUBTRACT;
  1749. default: SOKOL_UNREACHABLE; return 0;
  1750. }
  1751. }
  1752. _SOKOL_PRIVATE GLenum _sg_gl_filter(sg_filter f) {
  1753. switch (f) {
  1754. case SG_FILTER_NEAREST: return GL_NEAREST;
  1755. case SG_FILTER_LINEAR: return GL_LINEAR;
  1756. case SG_FILTER_NEAREST_MIPMAP_NEAREST: return GL_NEAREST_MIPMAP_NEAREST;
  1757. case SG_FILTER_NEAREST_MIPMAP_LINEAR: return GL_NEAREST_MIPMAP_LINEAR;
  1758. case SG_FILTER_LINEAR_MIPMAP_NEAREST: return GL_LINEAR_MIPMAP_NEAREST;
  1759. case SG_FILTER_LINEAR_MIPMAP_LINEAR: return GL_LINEAR_MIPMAP_LINEAR;
  1760. default: SOKOL_UNREACHABLE; return 0;
  1761. }
  1762. }
  1763. _SOKOL_PRIVATE GLenum _sg_gl_wrap(sg_wrap w) {
  1764. switch (w) {
  1765. case SG_WRAP_CLAMP_TO_EDGE: return GL_CLAMP_TO_EDGE;
  1766. case SG_WRAP_REPEAT: return GL_REPEAT;
  1767. case SG_WRAP_MIRRORED_REPEAT: return GL_MIRRORED_REPEAT;
  1768. default: SOKOL_UNREACHABLE; return 0;
  1769. }
  1770. }
  1771. _SOKOL_PRIVATE GLenum _sg_gl_teximage_type(sg_pixel_format fmt) {
  1772. switch (fmt) {
  1773. case SG_PIXELFORMAT_RGBA32F:
  1774. case SG_PIXELFORMAT_R32F:
  1775. return GL_FLOAT;
  1776. case SG_PIXELFORMAT_RGBA16F:
  1777. case SG_PIXELFORMAT_R16F:
  1778. return GL_HALF_FLOAT;
  1779. case SG_PIXELFORMAT_RGBA8:
  1780. case SG_PIXELFORMAT_RGB8:
  1781. case SG_PIXELFORMAT_L8:
  1782. return GL_UNSIGNED_BYTE;
  1783. case SG_PIXELFORMAT_R10G10B10A2:
  1784. return GL_UNSIGNED_INT_2_10_10_10_REV;
  1785. case SG_PIXELFORMAT_R5G5B5A1:
  1786. return GL_UNSIGNED_SHORT_5_5_5_1;
  1787. case SG_PIXELFORMAT_R5G6B5:
  1788. return GL_UNSIGNED_SHORT_5_6_5;
  1789. case SG_PIXELFORMAT_RGBA4:
  1790. return GL_UNSIGNED_SHORT_4_4_4_4;
  1791. case SG_PIXELFORMAT_DEPTH:
  1792. /* FIXME */
  1793. return GL_UNSIGNED_SHORT;
  1794. case SG_PIXELFORMAT_DEPTHSTENCIL:
  1795. /* FIXME */
  1796. return GL_UNSIGNED_INT_24_8;
  1797. default:
  1798. SOKOL_UNREACHABLE; return 0;
  1799. }
  1800. }
  1801. _SOKOL_PRIVATE GLenum _sg_gl_teximage_format(sg_pixel_format fmt) {
  1802. switch (fmt) {
  1803. case SG_PIXELFORMAT_NONE:
  1804. return 0;
  1805. case SG_PIXELFORMAT_RGBA8:
  1806. case SG_PIXELFORMAT_R5G5B5A1:
  1807. case SG_PIXELFORMAT_RGBA4:
  1808. case SG_PIXELFORMAT_RGBA32F:
  1809. case SG_PIXELFORMAT_RGBA16F:
  1810. case SG_PIXELFORMAT_R10G10B10A2:
  1811. return GL_RGBA;
  1812. case SG_PIXELFORMAT_RGB8:
  1813. case SG_PIXELFORMAT_R5G6B5:
  1814. return GL_RGB;
  1815. case SG_PIXELFORMAT_L8:
  1816. case SG_PIXELFORMAT_R32F:
  1817. case SG_PIXELFORMAT_R16F:
  1818. #if defined(SOKOL_GLES2)
  1819. return GL_LUMINANCE;
  1820. #else
  1821. return GL_RED;
  1822. #endif
  1823. case SG_PIXELFORMAT_DEPTH:
  1824. return GL_DEPTH_COMPONENT;
  1825. case SG_PIXELFORMAT_DEPTHSTENCIL:
  1826. return GL_DEPTH_STENCIL;
  1827. case SG_PIXELFORMAT_DXT1:
  1828. return GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
  1829. case SG_PIXELFORMAT_DXT3:
  1830. return GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
  1831. case SG_PIXELFORMAT_DXT5:
  1832. return GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
  1833. case SG_PIXELFORMAT_PVRTC2_RGB:
  1834. return GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
  1835. case SG_PIXELFORMAT_PVRTC4_RGB:
  1836. return GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
  1837. case SG_PIXELFORMAT_PVRTC2_RGBA:
  1838. return GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
  1839. case SG_PIXELFORMAT_PVRTC4_RGBA:
  1840. return GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
  1841. case SG_PIXELFORMAT_ETC2_RGB8:
  1842. return GL_COMPRESSED_RGB8_ETC2;
  1843. case SG_PIXELFORMAT_ETC2_SRGB8:
  1844. return GL_COMPRESSED_SRGB8_ETC2;
  1845. default:
  1846. SOKOL_UNREACHABLE; return 0;
  1847. }
  1848. }
/* map sg_pixel_format to the 'internalFormat' argument of glTexImage*().
   On GLES2, and on GLES3 when running in GLES2-fallback mode, the internal
   format must match the client format, so this simply defers to
   _sg_gl_teximage_format() in those cases. Otherwise sized internal
   formats are returned. */
_SOKOL_PRIVATE GLenum _sg_gl_teximage_internal_format(sg_pixel_format fmt) {
    #if defined(SOKOL_GLES2)
    return _sg_gl_teximage_format(fmt);
    #else
    if (_sg_gl_gles2) {
        /* GLES2-fallback: unsized internal formats only */
        return _sg_gl_teximage_format(fmt);
    }
    else {
        switch (fmt) {
            case SG_PIXELFORMAT_NONE:
                return 0;
            case SG_PIXELFORMAT_RGBA8:
                return GL_RGBA8;
            case SG_PIXELFORMAT_RGB8:
                return GL_RGB8;
            case SG_PIXELFORMAT_RGBA4:
                return GL_RGBA4;
            case SG_PIXELFORMAT_R5G6B5:
                /* GL_RGB565 only exists on GLES3, desktop GL uses GL_RGB5 */
                #if defined(SOKOL_GLES3)
                return GL_RGB565;
                #else
                return GL_RGB5;
                #endif
            case SG_PIXELFORMAT_R5G5B5A1:
                return GL_RGB5_A1;
            case SG_PIXELFORMAT_R10G10B10A2:
                return GL_RGB10_A2;
            case SG_PIXELFORMAT_RGBA32F:
                return GL_RGBA32F;
            case SG_PIXELFORMAT_RGBA16F:
                return GL_RGBA16F;
            case SG_PIXELFORMAT_R32F:
                return GL_R32F;
            case SG_PIXELFORMAT_R16F:
                return GL_R16F;
            case SG_PIXELFORMAT_L8:
                /* single-channel 'luminance' maps to GL_R8 on modern GL */
                return GL_R8;
            case SG_PIXELFORMAT_DEPTH:
                /* FIXME */
                return GL_DEPTH_COMPONENT16;
            case SG_PIXELFORMAT_DEPTHSTENCIL:
                return GL_DEPTH24_STENCIL8;
            case SG_PIXELFORMAT_DXT1:
                return GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
            case SG_PIXELFORMAT_DXT3:
                return GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
            case SG_PIXELFORMAT_DXT5:
                return GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
            case SG_PIXELFORMAT_PVRTC2_RGB:
                return GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG;
            case SG_PIXELFORMAT_PVRTC4_RGB:
                return GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG;
            case SG_PIXELFORMAT_PVRTC2_RGBA:
                return GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG;
            case SG_PIXELFORMAT_PVRTC4_RGBA:
                return GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG;
            case SG_PIXELFORMAT_ETC2_RGB8:
                return GL_COMPRESSED_RGB8_ETC2;
            case SG_PIXELFORMAT_ETC2_SRGB8:
                return GL_COMPRESSED_SRGB8_ETC2;
            default:
                SOKOL_UNREACHABLE; return 0;
        }
    }
    #endif
}
  1915. _SOKOL_PRIVATE GLenum _sg_gl_cubeface_target(int face_index) {
  1916. switch (face_index) {
  1917. case 0: return GL_TEXTURE_CUBE_MAP_POSITIVE_X;
  1918. case 1: return GL_TEXTURE_CUBE_MAP_NEGATIVE_X;
  1919. case 2: return GL_TEXTURE_CUBE_MAP_POSITIVE_Y;
  1920. case 3: return GL_TEXTURE_CUBE_MAP_NEGATIVE_Y;
  1921. case 4: return GL_TEXTURE_CUBE_MAP_POSITIVE_Z;
  1922. case 5: return GL_TEXTURE_CUBE_MAP_NEGATIVE_Z;
  1923. default: SOKOL_UNREACHABLE; return 0;
  1924. }
  1925. }
  1926. _SOKOL_PRIVATE GLenum _sg_gl_depth_attachment_format(sg_pixel_format fmt) {
  1927. switch (fmt) {
  1928. case SG_PIXELFORMAT_DEPTH: return GL_DEPTH_COMPONENT16;
  1929. case SG_PIXELFORMAT_DEPTHSTENCIL: return GL_DEPTH24_STENCIL8;
  1930. default: SOKOL_UNREACHABLE; return 0;
  1931. }
  1932. }
  1933. /*-- GL backend resource declarations ----------------------------------------*/
/* GL backend state for a sg_buffer resource */
typedef struct {
    _sg_slot slot;              /* resource pool slot (id + lifecycle state) */
    int size;                   /* buffer size in bytes */
    sg_buffer_type type;        /* vertex- or index-buffer */
    sg_usage usage;             /* immutable/dynamic/stream */
    uint32_t upd_frame_index;   /* frame index of the most recent content update */
    int num_slots;              /* 1 for immutable buffers, else SG_NUM_INFLIGHT_FRAMES */
    int active_slot;            /* index of the currently used gl_buf entry */
    GLuint gl_buf[SG_NUM_INFLIGHT_FRAMES];  /* one GL buffer per in-flight frame */
    bool ext_buffers; /* if true, external buffers were injected with sg_buffer_desc.gl_buffers */
} _sg_buffer;
  1945. _SOKOL_PRIVATE void _sg_init_buffer(_sg_buffer* buf) {
  1946. SOKOL_ASSERT(buf);
  1947. memset(buf, 0, sizeof(_sg_buffer));
  1948. }
/* GL backend state for a sg_image resource */
typedef struct {
    _sg_slot slot;                  /* resource pool slot (id + lifecycle state) */
    sg_image_type type;             /* 2D, cube, 3D or array texture */
    bool render_target;             /* true if usable as pass attachment */
    int width;                      /* width in pixels */
    int height;                     /* height in pixels */
    int depth;                      /* depth (3D) or number of layers (array) */
    int num_mipmaps;
    sg_usage usage;                 /* immutable/dynamic/stream */
    sg_pixel_format pixel_format;
    int sample_count;               /* MSAA sample count (>1 enables MSAA render buffer) */
    sg_filter min_filter;
    sg_filter mag_filter;
    sg_wrap wrap_u;
    sg_wrap wrap_v;
    sg_wrap wrap_w;
    uint32_t max_anisotropy;        /* anisotropic filtering level (1 = off) */
    GLenum gl_target;               /* GL texture target (GL_TEXTURE_2D etc.) */
    GLuint gl_depth_render_buffer;  /* renderbuffer for depth/depth-stencil formats */
    GLuint gl_msaa_render_buffer;   /* MSAA renderbuffer for multisampled render targets */
    uint32_t upd_frame_index;       /* frame index of the most recent content update */
    int num_slots;                  /* 1 for immutable images, else SG_NUM_INFLIGHT_FRAMES */
    int active_slot;                /* index of the currently used gl_tex entry */
    GLuint gl_tex[SG_NUM_INFLIGHT_FRAMES];  /* one GL texture per in-flight frame */
    bool ext_textures;  /* if true, external textures were injected with sg_image_desc.gl_textures */
} _sg_image;
  1975. _SOKOL_PRIVATE void _sg_init_image(_sg_image* img) {
  1976. SOKOL_ASSERT(img);
  1977. memset(img, 0, sizeof(_sg_image));
  1978. }
/* a single uniform inside a uniform block */
typedef struct {
    GLint gl_loc;           /* GL uniform location */
    sg_uniform_type type;
    uint8_t count;          /* array element count */
    uint16_t offset;        /* data offset in bytes within the uniform block */
} _sg_uniform;
/* a uniform block: total byte size plus its member uniforms */
typedef struct {
    int size;
    int num_uniforms;
    _sg_uniform uniforms[SG_MAX_UB_MEMBERS];
} _sg_uniform_block;
/* a texture binding on a shader stage */
typedef struct {
    sg_image_type type;
    GLint gl_loc;           /* GL sampler uniform location */
    int gl_tex_slot;        /* assigned GL texture unit */
} _sg_shader_image;
/* per-shader-stage binding info (uniform blocks and images) */
typedef struct {
    int num_uniform_blocks;
    int num_images;
    _sg_uniform_block uniform_blocks[SG_MAX_SHADERSTAGE_UBS];
    _sg_shader_image images[SG_MAX_SHADERSTAGE_IMAGES];
} _sg_shader_stage;
/* GL backend state for a sg_shader resource: the linked GL program
   plus per-stage binding info */
typedef struct {
    _sg_slot slot;
    GLuint gl_prog;
    _sg_shader_stage stage[SG_NUM_SHADER_STAGES];
} _sg_shader;
  2006. _SOKOL_PRIVATE void _sg_init_shader(_sg_shader* shd) {
  2007. SOKOL_ASSERT(shd);
  2008. memset(shd, 0, sizeof(_sg_shader));
  2009. }
/* cached per-attribute vertex layout state (the arguments for
   glVertexAttribPointer / glVertexAttribDivisor) */
typedef struct {
    int8_t vb_index;        /* -1 if attr is not enabled */
    int8_t divisor;         /* -1 if not initialized */
    uint8_t stride;         /* vertex stride in bytes */
    uint8_t size;           /* number of components */
    uint8_t normalized;     /* GL_TRUE/GL_FALSE for normalized-integer formats */
    uint8_t offset;         /* byte offset of the attribute in the vertex */
    GLenum type;            /* GL component data type */
} _sg_gl_attr;
  2019. _SOKOL_PRIVATE void _sg_gl_init_attr(_sg_gl_attr* attr) {
  2020. attr->vb_index = -1;
  2021. attr->divisor = -1;
  2022. attr->stride = 0;
  2023. attr->size = 0;
  2024. attr->normalized = 0;
  2025. attr->offset = 0;
  2026. attr->type = 0;
  2027. }
/* GL backend state for a sg_pipeline resource: shader reference,
   resolved vertex layout, and the render state to apply on bind */
typedef struct {
    _sg_slot slot;                  /* resource pool slot (id + lifecycle state) */
    _sg_shader* shader;             /* pointer to the pipeline's shader */
    sg_shader shader_id;            /* id handle for validating the shader pointer */
    sg_primitive_type primitive_type;
    sg_index_type index_type;
    bool vertex_layout_valid[SG_MAX_SHADERSTAGE_BUFFERS]; /* which vertex-buffer bind slots have a layout */
    int color_attachment_count;
    sg_pixel_format color_format;
    sg_pixel_format depth_format;
    int sample_count;
    _sg_gl_attr gl_attrs[SG_MAX_VERTEX_ATTRIBUTES]; /* resolved per-attribute GL layout */
    sg_depth_stencil_state depth_stencil;
    sg_blend_state blend;
    sg_rasterizer_state rast;
} _sg_pipeline;
  2044. _SOKOL_PRIVATE void _sg_init_pipeline(_sg_pipeline* pip) {
  2045. SOKOL_ASSERT(pip);
  2046. memset(pip, 0, sizeof(_sg_pipeline));
  2047. }
/* one pass attachment: the attached image plus mip level / slice selection */
typedef struct {
    _sg_image* image;               /* pointer to the attached image */
    sg_image image_id;              /* id handle for validating the image pointer */
    int mip_level;                  /* attached mipmap level */
    int slice;                      /* cube face, array layer or 3D depth slice */
    GLuint gl_msaa_resolve_buffer;  /* resolve framebuffer for MSAA attachments */
} _sg_attachment;
/* GL backend state for a sg_pass resource */
typedef struct {
    _sg_slot slot;                  /* resource pool slot (id + lifecycle state) */
    GLuint gl_fb;                   /* the pass framebuffer object */
    int num_color_atts;
    _sg_attachment color_atts[SG_MAX_COLOR_ATTACHMENTS];
    _sg_attachment ds_att;          /* depth-stencil attachment */
} _sg_pass;
  2062. _SOKOL_PRIVATE void _sg_init_pass(_sg_pass* pass) {
  2063. SOKOL_ASSERT(pass);
  2064. memset(pass, 0, sizeof(_sg_pass));
  2065. }
  2066. _SOKOL_PRIVATE void _sg_gl_init_stencil_state(sg_stencil_state* s) {
  2067. SOKOL_ASSERT(s);
  2068. s->fail_op = SG_STENCILOP_KEEP;
  2069. s->depth_fail_op = SG_STENCILOP_KEEP;
  2070. s->pass_op = SG_STENCILOP_KEEP;
  2071. s->compare_func = SG_COMPAREFUNC_ALWAYS;
  2072. }
  2073. _SOKOL_PRIVATE void _sg_gl_init_depth_stencil_state(sg_depth_stencil_state* s) {
  2074. SOKOL_ASSERT(s);
  2075. _sg_gl_init_stencil_state(&s->stencil_front);
  2076. _sg_gl_init_stencil_state(&s->stencil_back);
  2077. s->depth_compare_func = SG_COMPAREFUNC_ALWAYS;
  2078. s->depth_write_enabled = false;
  2079. s->stencil_enabled = false;
  2080. s->stencil_read_mask = 0;
  2081. s->stencil_write_mask = 0;
  2082. s->stencil_ref = 0;
  2083. }
  2084. _SOKOL_PRIVATE void _sg_gl_init_blend_state(sg_blend_state* s) {
  2085. SOKOL_ASSERT(s);
  2086. s->enabled = false;
  2087. s->src_factor_rgb = SG_BLENDFACTOR_ONE;
  2088. s->dst_factor_rgb = SG_BLENDFACTOR_ZERO;
  2089. s->op_rgb = SG_BLENDOP_ADD;
  2090. s->src_factor_alpha = SG_BLENDFACTOR_ONE;
  2091. s->dst_factor_alpha = SG_BLENDFACTOR_ZERO;
  2092. s->op_alpha = SG_BLENDOP_ADD;
  2093. s->color_write_mask = SG_COLORMASK_RGBA;
  2094. for (int i = 0; i < 4; i++) {
  2095. s->blend_color[i] = 0.0f;
  2096. }
  2097. }
  2098. _SOKOL_PRIVATE void _sg_gl_init_rasterizer_state(sg_rasterizer_state* s) {
  2099. SOKOL_ASSERT(s);
  2100. s->alpha_to_coverage_enabled = false;
  2101. s->cull_mode = SG_CULLMODE_NONE;
  2102. s->face_winding = SG_FACEWINDING_CW;
  2103. s->sample_count = 1;
  2104. s->depth_bias = 0.0f;
  2105. s->depth_bias_slope_scale = 0.0f;
  2106. s->depth_bias_clamp = 0.0f;
  2107. }
  2108. /*-- state cache implementation ----------------------------------------------*/
  2109. /*-- state cache and backend structs -----------------------------------------*/
/* cached state of one GL vertex attribute slot plus the buffer it sources from */
typedef struct {
    _sg_gl_attr gl_attr;    /* last applied attribute layout */
    GLuint gl_vbuf;         /* GL vertex buffer bound for this attribute */
} _sg_gl_cache_attr;
/* shadow copy of GL render state, used to avoid redundant GL calls */
typedef struct {
    sg_depth_stencil_state ds;      /* last applied depth-stencil state */
    sg_blend_state blend;           /* last applied blend state */
    sg_rasterizer_state rast;       /* last applied rasterizer state */
    bool polygon_offset_enabled;    /* tracks GL_POLYGON_OFFSET_FILL enable state */
    _sg_gl_cache_attr attrs[SG_MAX_VERTEX_ATTRIBUTES];
    GLuint cur_gl_ib;               /* currently bound GL index buffer */
    GLenum cur_primitive_type;      /* primitive mode for the next draw */
    GLenum cur_index_type;          /* index element type (0 = non-indexed) */
    _sg_pipeline* cur_pipeline;     /* currently applied pipeline */
    sg_pipeline cur_pipeline_id;    /* id handle for validating cur_pipeline */
} _sg_state_cache;
  2126. _SOKOL_PRIVATE void _sg_gl_reset_state_cache(_sg_state_cache* cache) {
  2127. SOKOL_ASSERT(cache);
  2128. glBindBuffer(GL_ARRAY_BUFFER, 0);
  2129. glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
  2130. for (int i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
  2131. _sg_gl_init_attr(&cache->attrs[i].gl_attr);
  2132. cache->attrs[i].gl_vbuf = 0;
  2133. glDisableVertexAttribArray(i);
  2134. }
  2135. cache->cur_gl_ib = 0;
  2136. cache->cur_primitive_type = GL_TRIANGLES;
  2137. cache->cur_index_type = 0;
  2138. /* resource bindings */
  2139. cache->cur_pipeline = 0;
  2140. cache->cur_pipeline_id.id = SG_INVALID_ID;
  2141. /* depth-stencil state */
  2142. _sg_gl_init_depth_stencil_state(&cache->ds);
  2143. glEnable(GL_DEPTH_TEST);
  2144. glDepthFunc(GL_ALWAYS);
  2145. glDepthMask(GL_FALSE);
  2146. glDisable(GL_STENCIL_TEST);
  2147. glStencilFunc(GL_ALWAYS, 0, 0);
  2148. glStencilOp(GL_KEEP, GL_KEEP, GL_KEEP);
  2149. glStencilMask(0);
  2150. /* blend state */
  2151. _sg_gl_init_blend_state(&cache->blend);
  2152. glDisable(GL_BLEND);
  2153. glBlendFuncSeparate(GL_ONE, GL_ZERO, GL_ONE, GL_ZERO);
  2154. glBlendEquationSeparate(GL_FUNC_ADD, GL_FUNC_ADD);
  2155. glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
  2156. glBlendColor(0.0f, 0.0f, 0.0f, 0.0f);
  2157. /* rasterizer state */
  2158. _sg_gl_init_rasterizer_state(&cache->rast);
  2159. cache->polygon_offset_enabled = false;
  2160. glPolygonOffset(0.0f, 0.0f);
  2161. glDisable(GL_POLYGON_OFFSET_FILL);
  2162. glDisable(GL_CULL_FACE);
  2163. glFrontFace(GL_CW);
  2164. glCullFace(GL_BACK);
  2165. glEnable(GL_SCISSOR_TEST);
  2166. glDisable(GL_SAMPLE_ALPHA_TO_COVERAGE);
  2167. glEnable(GL_DITHER);
  2168. glDisable(GL_POLYGON_OFFSET_FILL);
  2169. #if defined(SOKOL_GLCORE33)
  2170. glEnable(GL_MULTISAMPLE);
  2171. glEnable(GL_PROGRAM_POINT_SIZE);
  2172. #endif
  2173. }
  2174. /*-- main GL backend state and functions -------------------------------------*/
/* top-level GL backend state */
typedef struct {
    bool valid;                     /* true between setup and discard */
    bool in_pass;                   /* true between begin-pass and end-pass */
    GLuint default_framebuffer;     /* framebuffer bound at setup time, used as default render target */
    int cur_pass_width;             /* width of the currently active pass */
    int cur_pass_height;            /* height of the currently active pass */
    _sg_pass* cur_pass;             /* currently active pass (0 for default pass) */
    sg_pass cur_pass_id;            /* id handle for validating cur_pass */
    _sg_state_cache cache;          /* shadow copy of GL state */
    bool features[SG_NUM_FEATURES]; /* detected optional features */
    bool ext_anisotropic;           /* anisotropic-filtering extension present */
    GLint max_anisotropy;           /* max anisotropy level supported by the GL */
    #if !defined(SOKOL_GLES2)
    GLuint vao;                     /* single global vertex array object (GL3/GLES3 only) */
    #endif
} _sg_backend;
/* the one global GL backend state instance */
static _sg_backend _sg_gl;
/* one-time GL backend setup: records the default framebuffer, creates the
   global VAO (GL3/GLES3), resets the state cache to known defaults and
   detects optional features / extensions for the active GL flavor */
_SOKOL_PRIVATE void _sg_setup_backend(const sg_desc* desc) {
    /* gl_force_gles2 makes the GLES3 backend behave like GLES2 at runtime */
    _sg_gl_gles2 = desc->gl_force_gles2;
    memset(&_sg_gl, 0, sizeof(_sg_gl));
    _sg_gl.valid = true;
    _sg_gl.in_pass = false;
    /* remember the framebuffer bound right now as the 'default' render target */
    glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*)&_sg_gl.default_framebuffer);
    _sg_gl.cur_pass_width = 0;
    _sg_gl.cur_pass_height = 0;
    _sg_gl.cur_pass = 0;
    _sg_gl.cur_pass_id.id = SG_INVALID_ID;
    #if !defined(SOKOL_GLES2)
    if (!_sg_gl_gles2) {
        /* one global VAO is used for all vertex attribute state */
        glGenVertexArrays(1, &_sg_gl.vao);
        glBindVertexArray(_sg_gl.vao);
    }
    #endif
    _sg_gl_reset_state_cache(&_sg_gl.cache);
    /* initialize feature flags */
    for (int i = 0; i < SG_NUM_FEATURES; i++) {
        _sg_gl.features[i] = false;
    }
    _sg_gl.ext_anisotropic = false;
    _sg_gl.features[SG_FEATURE_ORIGIN_BOTTOM_LEFT] = true;
    #if defined(SOKOL_GLCORE33)
    /* desktop GL 3.3 core: all optional features guaranteed, only
       extensions need to be queried (via glGetStringi) */
    _sg_gl.features[SG_FEATURE_INSTANCING] = true;
    _sg_gl.features[SG_FEATURE_TEXTURE_FLOAT] = true;
    _sg_gl.features[SG_FEATURE_TEXTURE_HALF_FLOAT] = true;
    _sg_gl.features[SG_FEATURE_MSAA_RENDER_TARGETS] = true;
    _sg_gl.features[SG_FEATURE_PACKED_VERTEX_FORMAT_10_2] = true;
    _sg_gl.features[SG_FEATURE_MULTIPLE_RENDER_TARGET] = true;
    _sg_gl.features[SG_FEATURE_IMAGETYPE_3D] = true;
    _sg_gl.features[SG_FEATURE_IMAGETYPE_ARRAY] = true;
    GLint num_ext = 0;
    glGetIntegerv(GL_NUM_EXTENSIONS, &num_ext);
    for (int i = 0; i < num_ext; i++) {
        const char* ext = (const char*) glGetStringi(GL_EXTENSIONS, i);
        if (strstr(ext, "_texture_compression_s3tc")) {
            _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_DXT] = true;
            continue;
        }
        else if (strstr(ext, "_texture_filter_anisotropic")) {
            _sg_gl.ext_anisotropic = true;
            continue;
        }
    }
    #elif defined(SOKOL_GLES3)
    /* GLES3: core features guaranteed, compressed-texture and anisotropic
       support detected via the classic extension string */
    _sg_gl.features[SG_FEATURE_INSTANCING] = true;
    _sg_gl.features[SG_FEATURE_TEXTURE_FLOAT] = true;
    _sg_gl.features[SG_FEATURE_TEXTURE_HALF_FLOAT] = true;
    _sg_gl.features[SG_FEATURE_MSAA_RENDER_TARGETS] = true;
    _sg_gl.features[SG_FEATURE_PACKED_VERTEX_FORMAT_10_2] = true;
    _sg_gl.features[SG_FEATURE_MULTIPLE_RENDER_TARGET] = true;
    _sg_gl.features[SG_FEATURE_IMAGETYPE_3D] = true;
    _sg_gl.features[SG_FEATURE_IMAGETYPE_ARRAY] = true;
    /* NOTE(review): glGetString() can return NULL without a current GL
       context, which would crash strstr() -- presumably a valid context
       is a precondition here; confirm */
    const char* ext = (const char*) glGetString(GL_EXTENSIONS);
    _sg_gl.ext_anisotropic = strstr(ext, "_texture_filter_anisotropic");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_DXT] =
        strstr(ext, "_texture_compression_s3tc") ||
        strstr(ext, "_compressed_texture_s3tc") ||
        strstr(ext, "texture_compression_dxt1");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_PVRTC] =
        strstr(ext, "_texture_compression_pvrtc") ||
        strstr(ext, "_compressed_texture_pvrtc");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_ATC] =
        strstr(ext, "_compressed_texture_atc");
    #elif defined(SOKOL_GLES2)
    /* GLES2: everything optional, detect via the extension string */
    const char* ext = (const char*) glGetString(GL_EXTENSIONS);
    _sg_gl.features[SG_FEATURE_INSTANCING] =
        strstr(ext, "_instanced_arrays");
    _sg_gl.features[SG_FEATURE_TEXTURE_FLOAT] =
        strstr(ext, "_texture_float");
    _sg_gl.features[SG_FEATURE_TEXTURE_HALF_FLOAT] =
        strstr(ext, "_texture_half_float");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_DXT] =
        strstr(ext, "_texture_compression_s3tc") ||
        strstr(ext, "_compressed_texture_s3tc") ||
        strstr(ext, "texture_compression_dxt1");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_PVRTC] =
        strstr(ext, "_texture_compression_pvrtc") ||
        strstr(ext, "_compressed_texture_pvrtc");
    _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_ATC] =
        strstr(ext, "_compressed_texture_atc");
    _sg_gl.ext_anisotropic =
        strstr(ext, "_texture_filter_anisotropic");
    #endif
    /* query the max anisotropy level if the extension is present */
    _sg_gl.max_anisotropy = 1;
    if (_sg_gl.ext_anisotropic) {
        glGetIntegerv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &_sg_gl.max_anisotropy);
    }
}
/* tear down backend-global GL objects (the global VAO on GL3/GLES3)
   and mark the backend as invalid */
_SOKOL_PRIVATE void _sg_discard_backend() {
    SOKOL_ASSERT(_sg_gl.valid);
    #if !defined(SOKOL_GLES2)
    if (!_sg_gl_gles2) {
        glDeleteVertexArrays(1, &_sg_gl.vao);
        _sg_gl.vao = 0;
    }
    #endif
    _sg_gl.valid = false;
}
  2292. _SOKOL_PRIVATE bool _sg_query_feature(sg_feature f) {
  2293. SOKOL_ASSERT((f>=0) && (f<SG_NUM_FEATURES));
  2294. return _sg_gl.features[f];
  2295. }
  2296. /*-- GL backend resource creation and destruction ----------------------------*/
/* create (or adopt) the GL buffer object(s) for a sg_buffer.
   Immutable buffers get a single GL buffer; dynamic/stream buffers get
   SG_NUM_INFLIGHT_FRAMES buffers for multi-buffered updates. If external
   buffers were injected via desc->gl_buffers they are adopted instead of
   created (and not deleted on destroy). */
_SOKOL_PRIVATE void _sg_create_buffer(_sg_buffer* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_ALLOC);
    _SG_GL_CHECK_ERROR();
    buf->size = desc->size;
    buf->type = _sg_def(desc->type, SG_BUFFERTYPE_VERTEXBUFFER);
    buf->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    buf->upd_frame_index = 0;
    /* immutable buffers need only one slot, others are multi-buffered */
    buf->num_slots = (buf->usage == SG_USAGE_IMMUTABLE) ? 1 : SG_NUM_INFLIGHT_FRAMES;
    buf->active_slot = 0;
    buf->ext_buffers = (0 != desc->gl_buffers[0]);
    GLenum gl_target = _sg_gl_buffer_target(buf->type);
    GLenum gl_usage = _sg_gl_usage(buf->usage);
    for (int slot = 0; slot < buf->num_slots; slot++) {
        GLuint gl_buf = 0;
        if (buf->ext_buffers) {
            /* adopt an externally created GL buffer */
            SOKOL_ASSERT(desc->gl_buffers[slot]);
            gl_buf = desc->gl_buffers[slot];
        }
        else {
            glGenBuffers(1, &gl_buf);
            glBindBuffer(gl_target, gl_buf);
            /* allocate storage first, then upload initial content for immutable buffers */
            glBufferData(gl_target, buf->size, 0, gl_usage);
            if (buf->usage == SG_USAGE_IMMUTABLE) {
                SOKOL_ASSERT(desc->content);
                glBufferSubData(gl_target, 0, buf->size, desc->content);
            }
        }
        buf->gl_buf[slot] = gl_buf;
    }
    _SG_GL_CHECK_ERROR();
    buf->slot.state = SG_RESOURCESTATE_VALID;
}
/* delete the GL buffer objects of a sg_buffer and reset the struct to its
   zeroed default state; externally injected buffers are NOT deleted, their
   ownership remains with the caller */
_SOKOL_PRIVATE void _sg_destroy_buffer(_sg_buffer* buf) {
    SOKOL_ASSERT(buf);
    _SG_GL_CHECK_ERROR();
    if (!buf->ext_buffers) {
        for (int slot = 0; slot < buf->num_slots; slot++) {
            if (buf->gl_buf[slot]) {
                glDeleteBuffers(1, &buf->gl_buf[slot]);
            }
        }
        _SG_GL_CHECK_ERROR();
    }
    _sg_init_buffer(buf);
}
  2343. _SOKOL_PRIVATE bool _sg_gl_supported_texture_format(sg_pixel_format fmt) {
  2344. switch (fmt) {
  2345. case SG_PIXELFORMAT_DXT1:
  2346. case SG_PIXELFORMAT_DXT3:
  2347. case SG_PIXELFORMAT_DXT5:
  2348. return _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_DXT];
  2349. case SG_PIXELFORMAT_PVRTC2_RGB:
  2350. case SG_PIXELFORMAT_PVRTC4_RGB:
  2351. case SG_PIXELFORMAT_PVRTC2_RGBA:
  2352. case SG_PIXELFORMAT_PVRTC4_RGBA:
  2353. return _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_PVRTC];
  2354. case SG_PIXELFORMAT_ETC2_RGB8:
  2355. case SG_PIXELFORMAT_ETC2_SRGB8:
  2356. return _sg_gl.features[SG_FEATURE_TEXTURE_COMPRESSION_ETC2];
  2357. default:
  2358. return true;
  2359. }
  2360. }
/* initialize a GL image resource from an sg_image_desc; img must be in ALLOC
   state on entry and ends up in VALID or FAILED state; depth/stencil formats
   become GL renderbuffers, everything else becomes one or more GL textures */
_SOKOL_PRIVATE void _sg_create_image(_sg_image* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_ALLOC);
    _SG_GL_CHECK_ERROR();
    /* copy desc attributes, substituting defaults for zero-initialized members */
    img->type = _sg_def(desc->type, SG_IMAGETYPE_2D);
    img->render_target = desc->render_target;
    img->width = desc->width;
    img->height = desc->height;
    img->depth = _sg_def(desc->depth, 1);
    img->num_mipmaps = _sg_def(desc->num_mipmaps, 1);
    img->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    img->pixel_format = _sg_def(desc->pixel_format, SG_PIXELFORMAT_RGBA8);
    img->sample_count = _sg_def(desc->sample_count, 1);
    img->min_filter = _sg_def(desc->min_filter, SG_FILTER_NEAREST);
    img->mag_filter = _sg_def(desc->mag_filter, SG_FILTER_NEAREST);
    img->wrap_u = _sg_def(desc->wrap_u, SG_WRAP_REPEAT);
    img->wrap_v = _sg_def(desc->wrap_v, SG_WRAP_REPEAT);
    img->wrap_w = _sg_def(desc->wrap_w, SG_WRAP_REPEAT);
    img->max_anisotropy = _sg_def(desc->max_anisotropy, 1);
    img->upd_frame_index = 0;
    /* check if texture format is supported */
    if (!_sg_gl_supported_texture_format(img->pixel_format)) {
        SOKOL_LOG("compressed texture format not supported by GL context\n");
        img->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    /* check for optional texture types */
    if ((img->type == SG_IMAGETYPE_3D) && !_sg_gl.features[SG_FEATURE_IMAGETYPE_3D]) {
        SOKOL_LOG("3D textures not supported by GL context\n");
        img->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    if ((img->type == SG_IMAGETYPE_ARRAY) && !_sg_gl.features[SG_FEATURE_IMAGETYPE_ARRAY]) {
        SOKOL_LOG("array textures not supported by GL context\n");
        img->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    /* create 1 or 2 GL textures, depending on requested update strategy
       (dynamic/stream images get SG_NUM_INFLIGHT_FRAMES slots for
       double-buffered updates) */
    img->num_slots = (img->usage == SG_USAGE_IMMUTABLE) ? 1 : SG_NUM_INFLIGHT_FRAMES;
    img->active_slot = 0;
    /* a non-zero first entry in desc->gl_textures marks externally injected textures */
    img->ext_textures = (0 != desc->gl_textures[0]);
    #if !defined(SOKOL_GLES2)
    /* MSAA rendering is only possible on GLES3+/GL33 when the feature is reported */
    bool msaa = false;
    if (!_sg_gl_gles2) {
        msaa = (img->sample_count > 1) && (_sg_gl.features[SG_FEATURE_MSAA_RENDER_TARGETS]);
    }
    #endif
    if (_sg_is_valid_rendertarget_depth_format(img->pixel_format)) {
        /* special case depth-stencil-buffer: created as renderbuffer, not texture */
        SOKOL_ASSERT((img->usage == SG_USAGE_IMMUTABLE) && (img->num_slots == 1));
        SOKOL_ASSERT(!img->ext_textures); /* cannot provide external texture for depth images */
        glGenRenderbuffers(1, &img->gl_depth_render_buffer);
        glBindRenderbuffer(GL_RENDERBUFFER, img->gl_depth_render_buffer);
        GLenum gl_depth_format = _sg_gl_depth_attachment_format(img->pixel_format);
        #if !defined(SOKOL_GLES2)
        if (msaa) {
            glRenderbufferStorageMultisample(GL_RENDERBUFFER, img->sample_count, gl_depth_format, img->width, img->height);
        }
        else
        #endif
        {
            glRenderbufferStorage(GL_RENDERBUFFER, gl_depth_format, img->width, img->height);
        }
    }
    else {
        /* regular color texture */
        img->gl_target = _sg_gl_texture_target(img->type);
        const GLenum gl_internal_format = _sg_gl_teximage_internal_format(img->pixel_format);
        /* if this is a MSAA render target, need to create a separate render buffer
           (rendering goes into the renderbuffer, resolved into the texture later) */
        #if !defined(SOKOL_GLES2)
        if (img->render_target && msaa) {
            glGenRenderbuffers(1, &img->gl_msaa_render_buffer);
            glBindRenderbuffer(GL_RENDERBUFFER, img->gl_msaa_render_buffer);
            glRenderbufferStorageMultisample(GL_RENDERBUFFER, img->sample_count, gl_internal_format, img->width, img->height);
        }
        #endif
        if (img->ext_textures) {
            /* inject externally GL textures; no sampler state or data upload
               happens for injected textures */
            for (int slot = 0; slot < img->num_slots; slot++) {
                SOKOL_ASSERT(desc->gl_textures[slot]);
                img->gl_tex[slot] = desc->gl_textures[slot];
            }
        }
        else {
            /* create our own GL texture(s) */
            const GLenum gl_format = _sg_gl_teximage_format(img->pixel_format);
            const bool is_compressed = _sg_is_compressed_pixel_format(img->pixel_format);
            for (int slot = 0; slot < img->num_slots; slot++) {
                glGenTextures(1, &img->gl_tex[slot]);
                glActiveTexture(GL_TEXTURE0);
                glBindTexture(img->gl_target, img->gl_tex[slot]);
                /* sampler state is baked into the texture object */
                GLenum gl_min_filter = _sg_gl_filter(img->min_filter);
                GLenum gl_mag_filter = _sg_gl_filter(img->mag_filter);
                glTexParameteri(img->gl_target, GL_TEXTURE_MIN_FILTER, gl_min_filter);
                glTexParameteri(img->gl_target, GL_TEXTURE_MAG_FILTER, gl_mag_filter);
                if (_sg_gl.ext_anisotropic && (img->max_anisotropy > 1)) {
                    /* clamp requested anisotropy to the GL implementation limit */
                    GLint max_aniso = (GLint) img->max_anisotropy;
                    if (max_aniso > _sg_gl.max_anisotropy) {
                        max_aniso = _sg_gl.max_anisotropy;
                    }
                    glTexParameteri(img->gl_target, GL_TEXTURE_MAX_ANISOTROPY_EXT, max_aniso);
                }
                if (img->type == SG_IMAGETYPE_CUBE) {
                    /* cubemaps always use clamp-to-edge, ignoring desc wrap modes */
                    glTexParameteri(img->gl_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
                    glTexParameteri(img->gl_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
                }
                else {
                    glTexParameteri(img->gl_target, GL_TEXTURE_WRAP_S, _sg_gl_wrap(img->wrap_u));
                    glTexParameteri(img->gl_target, GL_TEXTURE_WRAP_T, _sg_gl_wrap(img->wrap_v));
                    #if !defined(SOKOL_GLES2)
                    /* the R wrap axis only exists for 3D textures on GLES3+/GL33 */
                    if (!_sg_gl_gles2 && (img->type == SG_IMAGETYPE_3D)) {
                        glTexParameteri(img->gl_target, GL_TEXTURE_WRAP_R, _sg_gl_wrap(img->wrap_w));
                    }
                    #endif
                }
                #if !defined(SOKOL_GLES2)
                if (!_sg_gl_gles2) {
                    /* GL spec has strange defaults for mipmap min/max lod: -1000 to +1000 */
                    const float min_lod = _sg_clamp(desc->min_lod, 0.0f, 1000.0f);
                    const float max_lod = _sg_clamp(_sg_def_flt(desc->max_lod, 1000.0f), 0.0f, 1000.0f);
                    glTexParameterf(img->gl_target, GL_TEXTURE_MIN_LOD, min_lod);
                    glTexParameterf(img->gl_target, GL_TEXTURE_MAX_LOD, max_lod);
                }
                #endif
                /* upload initial content (if any) for all faces and mip levels;
                   a NULL subimage pointer just allocates uninitialized storage */
                const int num_faces = img->type == SG_IMAGETYPE_CUBE ? 6 : 1;
                int data_index = 0;
                for (int face_index = 0; face_index < num_faces; face_index++) {
                    for (int mip_index = 0; mip_index < img->num_mipmaps; mip_index++, data_index++) {
                        GLenum gl_img_target = img->gl_target;
                        if (SG_IMAGETYPE_CUBE == img->type) {
                            gl_img_target = _sg_gl_cubeface_target(face_index);
                        }
                        const GLvoid* data_ptr = desc->content.subimage[face_index][mip_index].ptr;
                        const int data_size = desc->content.subimage[face_index][mip_index].size;
                        /* halve dimensions per mip level, clamped to 1 */
                        int mip_width = img->width >> mip_index;
                        if (mip_width == 0) {
                            mip_width = 1;
                        }
                        int mip_height = img->height >> mip_index;
                        if (mip_height == 0) {
                            mip_height = 1;
                        }
                        if ((SG_IMAGETYPE_2D == img->type) || (SG_IMAGETYPE_CUBE == img->type)) {
                            if (is_compressed) {
                                glCompressedTexImage2D(gl_img_target, mip_index, gl_internal_format,
                                    mip_width, mip_height, 0, data_size, data_ptr);
                            }
                            else {
                                const GLenum gl_type = _sg_gl_teximage_type(img->pixel_format);
                                glTexImage2D(gl_img_target, mip_index, gl_internal_format,
                                    mip_width, mip_height, 0, gl_format, gl_type, data_ptr);
                            }
                        }
                        #if !defined(SOKOL_GLES2)
                        else if (!_sg_gl_gles2 && ((SG_IMAGETYPE_3D == img->type) || (SG_IMAGETYPE_ARRAY == img->type))) {
                            int mip_depth = img->depth >> mip_index;
                            if (mip_depth == 0) {
                                mip_depth = 1;
                            }
                            if (is_compressed) {
                                glCompressedTexImage3D(gl_img_target, mip_index, gl_internal_format,
                                    mip_width, mip_height, mip_depth, 0, data_size, data_ptr);
                            }
                            else {
                                const GLenum gl_type = _sg_gl_teximage_type(img->pixel_format);
                                glTexImage3D(gl_img_target, mip_index, gl_internal_format,
                                    mip_width, mip_height, mip_depth, 0, gl_format, gl_type, data_ptr);
                            }
                        }
                        #endif
                    }
                }
            }
        }
    }
    _SG_GL_CHECK_ERROR();
    img->slot.state = SG_RESOURCESTATE_VALID;
}
  2539. _SOKOL_PRIVATE void _sg_destroy_image(_sg_image* img) {
  2540. SOKOL_ASSERT(img);
  2541. _SG_GL_CHECK_ERROR();
  2542. if (!img->ext_textures) {
  2543. for (int slot = 0; slot < img->num_slots; slot++) {
  2544. if (img->gl_tex[slot]) {
  2545. glDeleteTextures(1, &img->gl_tex[slot]);
  2546. }
  2547. }
  2548. }
  2549. if (img->gl_depth_render_buffer) {
  2550. glDeleteRenderbuffers(1, &img->gl_depth_render_buffer);
  2551. }
  2552. if (img->gl_msaa_render_buffer) {
  2553. glDeleteRenderbuffers(1, &img->gl_msaa_render_buffer);
  2554. }
  2555. _SG_GL_CHECK_ERROR();
  2556. _sg_init_image(img);
  2557. }
  2558. _SOKOL_PRIVATE GLuint _sg_gl_compile_shader(sg_shader_stage stage, const char* src) {
  2559. SOKOL_ASSERT(src);
  2560. _SG_GL_CHECK_ERROR();
  2561. GLuint gl_shd = glCreateShader(_sg_gl_shader_stage(stage));
  2562. glShaderSource(gl_shd, 1, &src, 0);
  2563. glCompileShader(gl_shd);
  2564. GLint compile_status = 0;
  2565. glGetShaderiv(gl_shd, GL_COMPILE_STATUS, &compile_status);
  2566. if (!compile_status) {
  2567. /* compilation failed, log error and delete shader */
  2568. GLint log_len = 0;
  2569. glGetShaderiv(gl_shd, GL_INFO_LOG_LENGTH, &log_len);
  2570. if (log_len > 0) {
  2571. GLchar* log_buf = (GLchar*) SOKOL_MALLOC(log_len);
  2572. glGetShaderInfoLog(gl_shd, log_len, &log_len, log_buf);
  2573. SOKOL_LOG(log_buf);
  2574. SOKOL_FREE(log_buf);
  2575. }
  2576. glDeleteShader(gl_shd);
  2577. gl_shd = 0;
  2578. }
  2579. _SG_GL_CHECK_ERROR();
  2580. return gl_shd;
  2581. }
/* compile and link a GL shader program from vertex/fragment sources and
   resolve uniform and image (sampler) locations; shd must be in ALLOC state,
   ends up in VALID or FAILED state */
_SOKOL_PRIVATE void _sg_create_shader(_sg_shader* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(!shd->gl_prog);
    _SG_GL_CHECK_ERROR();
    GLuint gl_vs = _sg_gl_compile_shader(SG_SHADERSTAGE_VS, desc->vs.source);
    GLuint gl_fs = _sg_gl_compile_shader(SG_SHADERSTAGE_FS, desc->fs.source);
    /* _sg_gl_compile_shader returns 0 on compile error; both stages must succeed */
    if (!(gl_vs && gl_fs)) {
        shd->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    GLuint gl_prog = glCreateProgram();
    glAttachShader(gl_prog, gl_vs);
    glAttachShader(gl_prog, gl_fs);
    glLinkProgram(gl_prog);
    /* shader objects can be flagged for deletion right after linking,
       they stay alive as long as they are attached to the program */
    glDeleteShader(gl_vs);
    glDeleteShader(gl_fs);
    _SG_GL_CHECK_ERROR();
    GLint link_status;
    glGetProgramiv(gl_prog, GL_LINK_STATUS, &link_status);
    if (!link_status) {
        /* link failed: dump the program info log and destroy the program */
        GLint log_len = 0;
        glGetProgramiv(gl_prog, GL_INFO_LOG_LENGTH, &log_len);
        if (log_len > 0) {
            GLchar* log_buf = (GLchar*) SOKOL_MALLOC(log_len);
            glGetProgramInfoLog(gl_prog, log_len, &log_len, log_buf);
            SOKOL_LOG(log_buf);
            SOKOL_FREE(log_buf);
        }
        glDeleteProgram(gl_prog);
        shd->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    shd->gl_prog = gl_prog;
    /* resolve uniforms: walk the uniform-block descs of both stages and record
       each member's byte offset and GL uniform location */
    _SG_GL_CHECK_ERROR();
    for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
        const sg_shader_stage_desc* stage_desc = (stage_index == SG_SHADERSTAGE_VS)? &desc->vs : &desc->fs;
        _sg_shader_stage* stage = &shd->stage[stage_index];
        SOKOL_ASSERT(stage->num_uniform_blocks == 0);
        for (int ub_index = 0; ub_index < SG_MAX_SHADERSTAGE_UBS; ub_index++) {
            const sg_shader_uniform_block_desc* ub_desc = &stage_desc->uniform_blocks[ub_index];
            /* a zero size marks the end of the uniform-block list */
            if (0 == ub_desc->size) {
                break;
            }
            _sg_uniform_block* ub = &stage->uniform_blocks[ub_index];
            ub->size = ub_desc->size;
            SOKOL_ASSERT(ub->num_uniforms == 0);
            int cur_uniform_offset = 0;
            for (int u_index = 0; u_index < SG_MAX_UB_MEMBERS; u_index++) {
                const sg_shader_uniform_desc* u_desc = &ub_desc->uniforms[u_index];
                /* an INVALID type marks the end of the member list */
                if (u_desc->type == SG_UNIFORMTYPE_INVALID) {
                    break;
                }
                _sg_uniform* u = &ub->uniforms[u_index];
                u->type = u_desc->type;
                u->count = _sg_def(u_desc->array_count, 1);
                /* members are assumed tightly packed in declaration order */
                u->offset = cur_uniform_offset;
                cur_uniform_offset += _sg_uniform_size(u->type, u->count);
                if (u_desc->name) {
                    u->gl_loc = glGetUniformLocation(gl_prog, u_desc->name);
                }
                else {
                    /* without a name, assume the location equals the member index */
                    u->gl_loc = u_index;
                }
                ub->num_uniforms++;
            }
            /* the declared uniform-block size must match the accumulated member sizes */
            SOKOL_ASSERT(ub_desc->size == cur_uniform_offset);
            stage->num_uniform_blocks++;
        }
    }
    /* resolve image locations: assign a texture-unit slot to each sampler
       uniform that actually exists in the linked program */
    _SG_GL_CHECK_ERROR();
    int gl_tex_slot = 0;
    for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
        const sg_shader_stage_desc* stage_desc = (stage_index == SG_SHADERSTAGE_VS)? &desc->vs : &desc->fs;
        _sg_shader_stage* stage = &shd->stage[stage_index];
        SOKOL_ASSERT(stage->num_images == 0);
        for (int img_index = 0; img_index < SG_MAX_SHADERSTAGE_IMAGES; img_index++) {
            const sg_shader_image_desc* img_desc = &stage_desc->images[img_index];
            /* a default image type marks the end of the image list */
            if (img_desc->type == _SG_IMAGETYPE_DEFAULT) {
                break;
            }
            _sg_shader_image* img = &stage->images[img_index];
            img->type = img_desc->type;
            img->gl_loc = img_index;
            if (img_desc->name) {
                img->gl_loc = glGetUniformLocation(gl_prog, img_desc->name);
            }
            if (img->gl_loc != -1) {
                /* sampler exists in the program: hand out the next texture unit */
                img->gl_tex_slot = gl_tex_slot++;
            }
            else {
                /* sampler was optimized away or misnamed: mark slot unused */
                img->gl_tex_slot = -1;
            }
            stage->num_images++;
        }
    }
    _SG_GL_CHECK_ERROR();
    shd->slot.state = SG_RESOURCESTATE_VALID;
}
  2683. _SOKOL_PRIVATE void _sg_destroy_shader(_sg_shader* shd) {
  2684. SOKOL_ASSERT(shd);
  2685. _SG_GL_CHECK_ERROR();
  2686. if (shd->gl_prog) {
  2687. glDeleteProgram(shd->gl_prog);
  2688. }
  2689. _SG_GL_CHECK_ERROR();
  2690. _sg_init_shader(shd);
  2691. }
  2692. _SOKOL_PRIVATE void _sg_gl_load_stencil(const sg_stencil_state* src, sg_stencil_state* dst) {
  2693. dst->fail_op = _sg_def(src->fail_op, SG_STENCILOP_KEEP);
  2694. dst->depth_fail_op = _sg_def(src->depth_fail_op, SG_STENCILOP_KEEP);
  2695. dst->pass_op = _sg_def(src->pass_op, SG_STENCILOP_KEEP);
  2696. dst->compare_func = _sg_def(src->compare_func, SG_COMPAREFUNC_ALWAYS);
  2697. }
  2698. _SOKOL_PRIVATE void _sg_gl_load_depth_stencil(const sg_depth_stencil_state* src, sg_depth_stencil_state* dst) {
  2699. _sg_gl_load_stencil(&src->stencil_front, &dst->stencil_front);
  2700. _sg_gl_load_stencil(&src->stencil_back, &dst->stencil_back);
  2701. dst->depth_compare_func = _sg_def(src->depth_compare_func, SG_COMPAREFUNC_ALWAYS);
  2702. dst->depth_write_enabled = src->depth_write_enabled;
  2703. dst->stencil_enabled = src->stencil_enabled;
  2704. dst->stencil_read_mask = src->stencil_read_mask;
  2705. dst->stencil_write_mask = src->stencil_write_mask;
  2706. dst->stencil_ref = src->stencil_ref;
  2707. }
  2708. _SOKOL_PRIVATE void _sg_gl_load_blend(const sg_blend_state* src, sg_blend_state* dst) {
  2709. dst->enabled = src->enabled;
  2710. dst->src_factor_rgb = _sg_def(src->src_factor_rgb, SG_BLENDFACTOR_ONE);
  2711. dst->dst_factor_rgb = _sg_def(src->dst_factor_rgb, SG_BLENDFACTOR_ZERO);
  2712. dst->op_rgb = _sg_def(src->op_rgb, SG_BLENDOP_ADD);
  2713. dst->src_factor_alpha = _sg_def(src->src_factor_alpha, SG_BLENDFACTOR_ONE);
  2714. dst->dst_factor_alpha = _sg_def(src->dst_factor_alpha, SG_BLENDFACTOR_ZERO);
  2715. dst->op_alpha = _sg_def(src->op_alpha, SG_BLENDOP_ADD);
  2716. if (src->color_write_mask == SG_COLORMASK_NONE) {
  2717. dst->color_write_mask = 0;
  2718. }
  2719. else {
  2720. dst->color_write_mask = _sg_def((sg_color_mask)src->color_write_mask, SG_COLORMASK_RGBA);
  2721. }
  2722. for (int i = 0; i < 4; i++) {
  2723. dst->blend_color[i] = src->blend_color[i];
  2724. }
  2725. }
  2726. _SOKOL_PRIVATE void _sg_gl_load_rasterizer(const sg_rasterizer_state* src, sg_rasterizer_state* dst) {
  2727. dst->alpha_to_coverage_enabled = src->alpha_to_coverage_enabled;
  2728. dst->cull_mode = _sg_def(src->cull_mode, SG_CULLMODE_NONE);
  2729. dst->face_winding = _sg_def(src->face_winding, SG_FACEWINDING_CW);
  2730. dst->sample_count = _sg_def(src->sample_count, 1);
  2731. dst->depth_bias = src->depth_bias;
  2732. dst->depth_bias_slope_scale = src->depth_bias_slope_scale;
  2733. dst->depth_bias_clamp = src->depth_bias_clamp;
  2734. }
/* initialize a pipeline object: cache resolved render state and resolve the
   shader's vertex-attribute locations and layout (stride/offset/divisor);
   pip must be in ALLOC state, shd must be a VALID shader */
_SOKOL_PRIVATE void _sg_create_pipeline(_sg_pipeline* pip, _sg_shader* shd, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && shd && desc);
    SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(!pip->shader && pip->shader_id.id == SG_INVALID_ID);
    SOKOL_ASSERT(desc->shader.id == shd->slot.id);
    SOKOL_ASSERT(shd->gl_prog);
    pip->shader = shd;
    pip->shader_id = desc->shader;
    pip->primitive_type = _sg_def(desc->primitive_type, SG_PRIMITIVETYPE_TRIANGLES);
    pip->index_type = _sg_def(desc->index_type, SG_INDEXTYPE_NONE);
    pip->color_attachment_count = _sg_def(desc->blend.color_attachment_count, 1);
    pip->color_format = _sg_def(desc->blend.color_format, SG_PIXELFORMAT_RGBA8);
    pip->depth_format = _sg_def(desc->blend.depth_format, SG_PIXELFORMAT_DEPTHSTENCIL);
    pip->sample_count = _sg_def(desc->rasterizer.sample_count, 1);
    _sg_gl_load_depth_stencil(&desc->depth_stencil, &pip->depth_stencil);
    _sg_gl_load_blend(&desc->blend, &pip->blend);
    _sg_gl_load_rasterizer(&desc->rasterizer, &pip->rast);
    /* resolve vertex attributes; auto_offset accumulates a running byte offset
       per vertex buffer for tightly-packed layouts */
    int auto_offset[SG_MAX_SHADERSTAGE_BUFFERS];
    for (int layout_index = 0; layout_index < SG_MAX_SHADERSTAGE_BUFFERS; layout_index++) {
        auto_offset[layout_index] = 0;
    }
    bool use_auto_offset = true;
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        /* mark all attribute slots as unused until resolved below */
        pip->gl_attrs[attr_index].vb_index = -1;
        /* to use computed offsets, *all* attr offsets must be 0 */
        if (desc->layout.attrs[attr_index].offset != 0) {
            use_auto_offset = false;
        }
    }
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        const sg_vertex_attr_desc* a_desc = &desc->layout.attrs[attr_index];
        /* an INVALID format marks the end of the attribute list */
        if (a_desc->format == SG_VERTEXFORMAT_INVALID) {
            break;
        }
        SOKOL_ASSERT((a_desc->buffer_index >= 0) && (a_desc->buffer_index < SG_MAX_SHADERSTAGE_BUFFERS));
        const sg_buffer_layout_desc* l_desc = &desc->layout.buffers[a_desc->buffer_index];
        const sg_vertex_step step_func = _sg_def(l_desc->step_func, SG_VERTEXSTEP_PER_VERTEX);
        const int step_rate = _sg_def(l_desc->step_rate, 1);
        /* without a name the attribute location is assumed to equal its index */
        GLint attr_loc = attr_index;
        if (a_desc->name) {
            attr_loc = glGetAttribLocation(pip->shader->gl_prog, a_desc->name);
        }
        SOKOL_ASSERT(attr_loc < SG_MAX_VERTEX_ATTRIBUTES);
        if (attr_loc != -1) {
            /* note: gl_attrs is indexed by GL location, not by desc index */
            _sg_gl_attr* gl_attr = &pip->gl_attrs[attr_loc];
            SOKOL_ASSERT(gl_attr->vb_index == -1);
            gl_attr->vb_index = a_desc->buffer_index;
            if (step_func == SG_VERTEXSTEP_PER_VERTEX) {
                gl_attr->divisor = 0;
            }
            else {
                /* per-instance stepping: divisor is the instance step rate */
                gl_attr->divisor = step_rate;
            }
            gl_attr->stride = l_desc->stride;
            gl_attr->offset = use_auto_offset ? auto_offset[a_desc->buffer_index] : a_desc->offset;
            gl_attr->size = _sg_gl_vertexformat_size(a_desc->format);
            gl_attr->type = _sg_gl_vertexformat_type(a_desc->format);
            gl_attr->normalized = _sg_gl_vertexformat_normalized(a_desc->format);
            pip->vertex_layout_valid[a_desc->buffer_index] = true;
        }
        else {
            /* attribute was optimized away by the GLSL compiler (or misnamed);
               only reachable when a_desc->name was provided, so the log call is safe */
            SOKOL_LOG("Vertex attribute not found in shader: ");
            SOKOL_LOG(a_desc->name);
        }
        /* advance the running offset even for missing attributes so following
           attributes in the same buffer keep their expected offsets */
        auto_offset[a_desc->buffer_index] += _sg_vertexformat_bytesize(a_desc->format);
    }
    /* fill computed vertex strides that haven't been explicitely provided
       (the accumulated offset equals the packed vertex size per buffer) */
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        _sg_gl_attr* gl_attr = &pip->gl_attrs[attr_index];
        if ((gl_attr->vb_index != -1) && (0 == gl_attr->stride)) {
            gl_attr->stride = auto_offset[gl_attr->vb_index];
        }
    }
    pip->slot.state = SG_RESOURCESTATE_VALID;
}
/* destroy a pipeline object; pipelines own no GL objects (the shader program
   belongs to the shader resource), so this only resets the struct */
_SOKOL_PRIVATE void _sg_destroy_pipeline(_sg_pipeline* pip) {
    SOKOL_ASSERT(pip);
    _sg_init_pipeline(pip);
}
  2815. /*
  2816. _sg_create_pass
  2817. att_imgs must point to a _sg_image* att_imgs[SG_MAX_COLOR_ATTACHMENTS+1] array,
  2818. first entries are the color attachment images (or nullptr), last entry
  2819. is the depth-stencil image (or nullptr).
  2820. */
  2821. _SOKOL_PRIVATE void _sg_create_pass(_sg_pass* pass, _sg_image** att_images, const sg_pass_desc* desc) {
  2822. SOKOL_ASSERT(pass && att_images && desc);
  2823. SOKOL_ASSERT(pass->slot.state == SG_RESOURCESTATE_ALLOC);
  2824. SOKOL_ASSERT(att_images && att_images[0]);
  2825. _SG_GL_CHECK_ERROR();
  2826. /* copy image pointers and desc attributes */
  2827. const sg_attachment_desc* att_desc;
  2828. _sg_attachment* att;
  2829. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  2830. SOKOL_ASSERT(0 == pass->color_atts[i].image);
  2831. att_desc = &desc->color_attachments[i];
  2832. if (att_desc->image.id != SG_INVALID_ID) {
  2833. pass->num_color_atts++;
  2834. SOKOL_ASSERT(att_images[i] && (att_images[i]->slot.id == att_desc->image.id));
  2835. SOKOL_ASSERT(_sg_is_valid_rendertarget_color_format(att_images[i]->pixel_format));
  2836. att = &pass->color_atts[i];
  2837. SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
  2838. att->image = att_images[i];
  2839. att->image_id = att_desc->image;
  2840. att->mip_level = att_desc->mip_level;
  2841. att->slice = att_desc->slice;
  2842. }
  2843. }
  2844. SOKOL_ASSERT(0 == pass->ds_att.image);
  2845. att_desc = &desc->depth_stencil_attachment;
  2846. const int ds_img_index = SG_MAX_COLOR_ATTACHMENTS;
  2847. if (att_desc->image.id != SG_INVALID_ID) {
  2848. SOKOL_ASSERT(att_images[ds_img_index] && (att_images[ds_img_index]->slot.id == att_desc->image.id));
  2849. SOKOL_ASSERT(_sg_is_valid_rendertarget_depth_format(att_images[ds_img_index]->pixel_format));
  2850. att = &pass->ds_att;
  2851. SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
  2852. att->image = att_images[ds_img_index];
  2853. att->image_id = att_desc->image;
  2854. att->mip_level = att_desc->mip_level;
  2855. att->slice = att_desc->slice;
  2856. }
  2857. /* store current framebuffer binding (restored at end of function) */
  2858. GLuint gl_orig_fb;
  2859. glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*)&gl_orig_fb);
  2860. /* create a framebuffer object */
  2861. glGenFramebuffers(1, &pass->gl_fb);
  2862. glBindFramebuffer(GL_FRAMEBUFFER, pass->gl_fb);
  2863. /* attach msaa render buffer or textures */
  2864. const bool is_msaa = (0 != att_images[0]->gl_msaa_render_buffer);
  2865. if (is_msaa) {
  2866. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  2867. const _sg_image* att_img = pass->color_atts[i].image;
  2868. if (att_img) {
  2869. const GLuint gl_render_buffer = att_img->gl_msaa_render_buffer;
  2870. SOKOL_ASSERT(gl_render_buffer);
  2871. glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0+i, GL_RENDERBUFFER, gl_render_buffer);
  2872. }
  2873. }
  2874. }
  2875. else {
  2876. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  2877. const _sg_image* att_img = pass->color_atts[i].image;
  2878. const int mip_level = pass->color_atts[i].mip_level;
  2879. const int slice = pass->color_atts[i].slice;
  2880. if (att_img) {
  2881. const GLuint gl_tex = att_img->gl_tex[0];
  2882. SOKOL_ASSERT(gl_tex);
  2883. const GLenum gl_att = GL_COLOR_ATTACHMENT0 + i;
  2884. switch (att_img->type) {
  2885. case SG_IMAGETYPE_2D:
  2886. glFramebufferTexture2D(GL_FRAMEBUFFER, gl_att, GL_TEXTURE_2D, gl_tex, mip_level);
  2887. break;
  2888. case SG_IMAGETYPE_CUBE:
  2889. glFramebufferTexture2D(GL_FRAMEBUFFER, gl_att, _sg_gl_cubeface_target(slice), gl_tex, mip_level);
  2890. break;
  2891. default:
  2892. /* 3D- or array-texture */
  2893. #if !defined(SOKOL_GLES2)
  2894. if (!_sg_gl_gles2) {
  2895. glFramebufferTextureLayer(GL_FRAMEBUFFER, gl_att, gl_tex, mip_level, slice);
  2896. }
  2897. #endif
  2898. break;
  2899. }
  2900. }
  2901. }
  2902. }
  2903. /* attach depth-stencil buffer to framebuffer */
  2904. if (pass->ds_att.image) {
  2905. const GLuint gl_render_buffer = pass->ds_att.image->gl_depth_render_buffer;
  2906. SOKOL_ASSERT(gl_render_buffer);
  2907. glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, gl_render_buffer);
  2908. if (_sg_is_depth_stencil_format(pass->ds_att.image->pixel_format)) {
  2909. glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_STENCIL_ATTACHMENT, GL_RENDERBUFFER, gl_render_buffer);
  2910. }
  2911. }
  2912. /* check if framebuffer is complete */
  2913. if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
  2914. SOKOL_LOG("Framebuffer completeness check failed!\n");
  2915. pass->slot.state = SG_RESOURCESTATE_FAILED;
  2916. return;
  2917. }
  2918. /* create MSAA resolve framebuffers if necessary */
  2919. if (is_msaa) {
  2920. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  2921. _sg_attachment* att = &pass->color_atts[i];
  2922. if (att->image) {
  2923. SOKOL_ASSERT(0 == att->gl_msaa_resolve_buffer);
  2924. glGenFramebuffers(1, &att->gl_msaa_resolve_buffer);
  2925. glBindFramebuffer(GL_FRAMEBUFFER, att->gl_msaa_resolve_buffer);
  2926. const GLuint gl_tex = att->image->gl_tex[0];
  2927. SOKOL_ASSERT(gl_tex);
  2928. switch (att->image->type) {
  2929. case SG_IMAGETYPE_2D:
  2930. glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
  2931. GL_TEXTURE_2D, gl_tex, att->mip_level);
  2932. break;
  2933. case SG_IMAGETYPE_CUBE:
  2934. glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
  2935. _sg_gl_cubeface_target(att->slice), gl_tex, att->mip_level);
  2936. break;
  2937. default:
  2938. #if !defined(SOKOL_GLES2)
  2939. if (!_sg_gl_gles2) {
  2940. glFramebufferTextureLayer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, gl_tex, att->mip_level, att->slice);
  2941. }
  2942. #endif
  2943. break;
  2944. }
  2945. /* check if framebuffer is complete */
  2946. if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
  2947. SOKOL_LOG("Framebuffer completeness check failed (msaa resolve buffer)!\n");
  2948. pass->slot.state = SG_RESOURCESTATE_FAILED;
  2949. return;
  2950. }
  2951. }
  2952. }
  2953. }
  2954. /* restore original framebuffer binding */
  2955. glBindFramebuffer(GL_FRAMEBUFFER, gl_orig_fb);
  2956. _SG_GL_CHECK_ERROR();
  2957. pass->slot.state = SG_RESOURCESTATE_VALID;
  2958. }
  2959. _SOKOL_PRIVATE void _sg_destroy_pass(_sg_pass* pass) {
  2960. SOKOL_ASSERT(pass);
  2961. _SG_GL_CHECK_ERROR();
  2962. if (0 != pass->gl_fb) {
  2963. glDeleteFramebuffers(1, &pass->gl_fb);
  2964. }
  2965. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  2966. if (pass->color_atts[i].gl_msaa_resolve_buffer) {
  2967. glDeleteFramebuffers(1, &pass->color_atts[i].gl_msaa_resolve_buffer);
  2968. }
  2969. }
  2970. if (pass->ds_att.gl_msaa_resolve_buffer) {
  2971. glDeleteFramebuffers(1, &pass->ds_att.gl_msaa_resolve_buffer);
  2972. }
  2973. _SG_GL_CHECK_ERROR();
  2974. _sg_init_pass(pass);
  2975. }
  2976. /*-- GL backend rendering functions ------------------------------------------*/
/* begin a render pass: bind the pass framebuffer (or the default framebuffer
   when pass is 0), reset viewport/scissor to the full pass size, force the
   write masks open so clears work, and perform the requested clear actions */
_SOKOL_PRIVATE void _sg_begin_pass(_sg_pass* pass, const sg_pass_action* action, int w, int h) {
    /* FIXME: what if a texture used as render target is still bound, should we
       unbind all currently bound textures in begin pass? */
    SOKOL_ASSERT(action);
    SOKOL_ASSERT(!_sg_gl.in_pass);
    _SG_GL_CHECK_ERROR();
    _sg_gl.in_pass = true;
    _sg_gl.cur_pass = pass; /* can be 0 */
    if (pass) {
        _sg_gl.cur_pass_id.id = pass->slot.id;
    }
    else {
        _sg_gl.cur_pass_id.id = SG_INVALID_ID;
    }
    /* pass size is remembered for y-flipping in viewport/scissor calls */
    _sg_gl.cur_pass_width = w;
    _sg_gl.cur_pass_height = h;
    if (pass) {
        /* offscreen pass: bind the pass framebuffer and enable all populated
           color attachments as draw buffers (GLES3/GL33 only) */
        SOKOL_ASSERT(pass->gl_fb);
        glBindFramebuffer(GL_FRAMEBUFFER, pass->gl_fb);
        #if !defined(SOKOL_GLES2)
        if (!_sg_gl_gles2) {
            GLenum att[SG_MAX_COLOR_ATTACHMENTS] = {
                GL_COLOR_ATTACHMENT0,
                GL_COLOR_ATTACHMENT1,
                GL_COLOR_ATTACHMENT2,
                GL_COLOR_ATTACHMENT3
            };
            /* count consecutive populated color attachments */
            int num_attrs = 0;
            for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
                if (pass->color_atts[num_attrs].image) {
                    num_attrs++;
                }
                else {
                    break;
                }
            }
            glDrawBuffers(num_attrs, att);
        }
        #endif
    }
    else {
        /* default pass */
        glBindFramebuffer(GL_FRAMEBUFFER, _sg_gl.default_framebuffer);
    }
    glViewport(0, 0, w, h);
    glScissor(0, 0, w, h);
    /* glClear respects the color/depth/stencil write masks, so temporarily
       force them fully open via the state cache */
    bool need_pip_cache_flush = false;
    if (_sg_gl.cache.blend.color_write_mask != SG_COLORMASK_RGBA) {
        need_pip_cache_flush = true;
        _sg_gl.cache.blend.color_write_mask = SG_COLORMASK_RGBA;
        glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
    }
    if (!_sg_gl.cache.ds.depth_write_enabled) {
        need_pip_cache_flush = true;
        _sg_gl.cache.ds.depth_write_enabled = true;
        glDepthMask(GL_TRUE);
    }
    if (_sg_gl.cache.ds.stencil_write_mask != 0xFF) {
        need_pip_cache_flush = true;
        _sg_gl.cache.ds.stencil_write_mask = 0xFF;
        glStencilMask(0xFF);
    }
    if (need_pip_cache_flush) {
        /* we messed with the state cache directly, need to clear cached
           pipeline to force re-evaluation in next sg_apply_draw_state() */
        _sg_gl.cache.cur_pipeline = 0;
        _sg_gl.cache.cur_pipeline_id.id = SG_INVALID_ID;
    }
    /* MRT-aware clears (glClearBuffer*) only exist on GLES3/GL33; default
       passes and GLES2 fall back to the single glClear path */
    bool use_mrt_clear = (0 != pass);
    #if defined(SOKOL_GLES2)
    use_mrt_clear = false;
    #else
    if (_sg_gl_gles2) {
        use_mrt_clear = false;
    }
    #endif
    if (!use_mrt_clear) {
        /* single-target clear: accumulate a clear mask and issue one glClear */
        GLbitfield clear_mask = 0;
        if (action->colors[0].action == SG_ACTION_CLEAR) {
            clear_mask |= GL_COLOR_BUFFER_BIT;
            const float* c = action->colors[0].val;
            glClearColor(c[0], c[1], c[2], c[3]);
        }
        if (action->depth.action == SG_ACTION_CLEAR) {
            clear_mask |= GL_DEPTH_BUFFER_BIT;
            /* glClearDepth takes double on desktop GL, float elsewhere */
            #ifdef SOKOL_GLCORE33
            glClearDepth(action->depth.val);
            #else
            glClearDepthf(action->depth.val);
            #endif
        }
        if (action->stencil.action == SG_ACTION_CLEAR) {
            clear_mask |= GL_STENCIL_BUFFER_BIT;
            glClearStencil(action->stencil.val);
        }
        if (0 != clear_mask) {
            glClear(clear_mask);
        }
    }
    #if !defined SOKOL_GLES2
    else {
        /* MRT clear: clear each populated color attachment individually,
           then depth/stencil (combined when both are cleared) */
        SOKOL_ASSERT(pass);
        for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
            if (pass->color_atts[i].image) {
                if (action->colors[i].action == SG_ACTION_CLEAR) {
                    glClearBufferfv(GL_COLOR, i, action->colors[i].val);
                }
            }
            else {
                break;
            }
        }
        if (pass->ds_att.image) {
            if ((action->depth.action == SG_ACTION_CLEAR) && (action->stencil.action == SG_ACTION_CLEAR)) {
                glClearBufferfi(GL_DEPTH_STENCIL, 0, action->depth.val, action->stencil.val);
            }
            else if (action->depth.action == SG_ACTION_CLEAR) {
                glClearBufferfv(GL_DEPTH, 0, &action->depth.val);
            }
            else if (action->stencil.action == SG_ACTION_CLEAR) {
                GLuint val = action->stencil.val;
                glClearBufferuiv(GL_STENCIL, 0, &val);
            }
        }
    }
    #endif
    _SG_GL_CHECK_ERROR();
}
/* end the current render pass; for offscreen MSAA passes this resolves each
   MSAA color attachment into its texture via glBlitFramebuffer, then rebinds
   the default framebuffer */
_SOKOL_PRIVATE void _sg_end_pass() {
    SOKOL_ASSERT(_sg_gl.in_pass);
    _SG_GL_CHECK_ERROR();
    /* if this was an offscreen pass, and MSAA rendering was used, need
       to resolve into the pass images */
    #if !defined(SOKOL_GLES2)
    if (!_sg_gl_gles2 && _sg_gl.cur_pass) {
        /* check if the pass object is still valid */
        const _sg_pass* pass = _sg_gl.cur_pass;
        SOKOL_ASSERT(pass->slot.id == _sg_gl.cur_pass_id.id);
        /* a resolve framebuffer on the first attachment marks the pass as MSAA */
        bool is_msaa = (0 != _sg_gl.cur_pass->color_atts[0].gl_msaa_resolve_buffer);
        if (is_msaa) {
            SOKOL_ASSERT(pass->gl_fb);
            /* blit source: the pass framebuffer with the MSAA renderbuffers */
            glBindFramebuffer(GL_READ_FRAMEBUFFER, pass->gl_fb);
            SOKOL_ASSERT(pass->color_atts[0].image);
            const int w = pass->color_atts[0].image->width;
            const int h = pass->color_atts[0].image->height;
            for (int att_index = 0; att_index < SG_MAX_COLOR_ATTACHMENTS; att_index++) {
                const _sg_attachment* att = &pass->color_atts[att_index];
                if (att->image) {
                    SOKOL_ASSERT(att->gl_msaa_resolve_buffer);
                    /* blit destination: this attachment's resolve framebuffer */
                    glBindFramebuffer(GL_DRAW_FRAMEBUFFER, att->gl_msaa_resolve_buffer);
                    glReadBuffer(GL_COLOR_ATTACHMENT0 + att_index);
                    const GLenum gl_att = GL_COLOR_ATTACHMENT0;
                    glDrawBuffers(1, &gl_att);
                    glBlitFramebuffer(0, 0, w, h, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST);
                }
                else {
                    /* attachments are populated consecutively, stop at first gap */
                    break;
                }
            }
        }
    }
    #endif
    /* clear the current-pass tracking state and rebind the default framebuffer */
    _sg_gl.cur_pass = 0;
    _sg_gl.cur_pass_id.id = SG_INVALID_ID;
    _sg_gl.cur_pass_width = 0;
    _sg_gl.cur_pass_height = 0;
    glBindFramebuffer(GL_FRAMEBUFFER, _sg_gl.default_framebuffer);
    _sg_gl.in_pass = false;
    _SG_GL_CHECK_ERROR();
}
  3148. _SOKOL_PRIVATE void _sg_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
  3149. SOKOL_ASSERT(_sg_gl.in_pass);
  3150. y = origin_top_left ? (_sg_gl.cur_pass_height - (y+h)) : y;
  3151. glViewport(x, y, w, h);
  3152. }
  3153. _SOKOL_PRIVATE void _sg_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
  3154. SOKOL_ASSERT(_sg_gl.in_pass);
  3155. y = origin_top_left ? (_sg_gl.cur_pass_height - (y+h)) : y;
  3156. glScissor(x, y, w, h);
  3157. }
  3158. _SOKOL_PRIVATE void _sg_apply_draw_state(
  3159. _sg_pipeline* pip,
  3160. _sg_buffer** vbs, int num_vbs, _sg_buffer* ib,
  3161. _sg_image** vs_imgs, int num_vs_imgs,
  3162. _sg_image** fs_imgs, int num_fs_imgs)
  3163. {
  3164. SOKOL_ASSERT(pip);
  3165. SOKOL_ASSERT(pip->shader);
  3166. _SG_GL_CHECK_ERROR();
  3167. /* need to apply pipeline state? */
  3168. if ((_sg_gl.cache.cur_pipeline != pip) || (_sg_gl.cache.cur_pipeline_id.id != pip->slot.id)) {
  3169. _sg_gl.cache.cur_pipeline = pip;
  3170. _sg_gl.cache.cur_pipeline_id.id = pip->slot.id;
  3171. _sg_gl.cache.cur_primitive_type = _sg_gl_primitive_type(pip->primitive_type);
  3172. _sg_gl.cache.cur_index_type = _sg_gl_index_type(pip->index_type);
  3173. /* update depth-stencil state */
  3174. const sg_depth_stencil_state* new_ds = &pip->depth_stencil;
  3175. sg_depth_stencil_state* cache_ds = &_sg_gl.cache.ds;
  3176. if (new_ds->depth_compare_func != cache_ds->depth_compare_func) {
  3177. cache_ds->depth_compare_func = new_ds->depth_compare_func;
  3178. glDepthFunc(_sg_gl_compare_func(new_ds->depth_compare_func));
  3179. }
  3180. if (new_ds->depth_write_enabled != cache_ds->depth_write_enabled) {
  3181. cache_ds->depth_write_enabled = new_ds->depth_write_enabled;
  3182. glDepthMask(new_ds->depth_write_enabled);
  3183. }
  3184. if (new_ds->stencil_enabled != cache_ds->stencil_enabled) {
  3185. cache_ds->stencil_enabled = new_ds->stencil_enabled;
  3186. if (new_ds->stencil_enabled) glEnable(GL_STENCIL_TEST);
  3187. else glDisable(GL_STENCIL_TEST);
  3188. }
  3189. if (new_ds->stencil_write_mask != cache_ds->stencil_write_mask) {
  3190. cache_ds->stencil_write_mask = new_ds->stencil_write_mask;
  3191. glStencilMask(new_ds->stencil_write_mask);
  3192. }
  3193. for (int i = 0; i < 2; i++) {
  3194. const sg_stencil_state* new_ss = (i==0)? &new_ds->stencil_front : &new_ds->stencil_back;
  3195. sg_stencil_state* cache_ss = (i==0)? &cache_ds->stencil_front : &cache_ds->stencil_back;
  3196. GLenum gl_face = (i==0)? GL_FRONT : GL_BACK;
  3197. if ((new_ss->compare_func != cache_ss->compare_func) ||
  3198. (new_ds->stencil_read_mask != cache_ds->stencil_read_mask) ||
  3199. (new_ds->stencil_ref != cache_ds->stencil_ref))
  3200. {
  3201. cache_ss->compare_func = new_ss->compare_func;
  3202. glStencilFuncSeparate(gl_face,
  3203. _sg_gl_compare_func(new_ss->compare_func),
  3204. new_ds->stencil_ref,
  3205. new_ds->stencil_read_mask);
  3206. }
  3207. if ((new_ss->fail_op != cache_ss->fail_op) ||
  3208. (new_ss->depth_fail_op != cache_ss->depth_fail_op) ||
  3209. (new_ss->pass_op != cache_ss->pass_op))
  3210. {
  3211. cache_ss->fail_op = new_ss->fail_op;
  3212. cache_ss->depth_fail_op = new_ss->depth_fail_op;
  3213. cache_ss->pass_op = new_ss->pass_op;
  3214. glStencilOpSeparate(gl_face,
  3215. _sg_gl_stencil_op(new_ss->fail_op),
  3216. _sg_gl_stencil_op(new_ss->depth_fail_op),
  3217. _sg_gl_stencil_op(new_ss->pass_op));
  3218. }
  3219. }
  3220. cache_ds->stencil_read_mask = new_ds->stencil_read_mask;
  3221. cache_ds->stencil_ref = new_ds->stencil_ref;
  3222. /* update blend state */
  3223. const sg_blend_state* new_b = &pip->blend;
  3224. sg_blend_state* cache_b = &_sg_gl.cache.blend;
  3225. if (new_b->enabled != cache_b->enabled) {
  3226. cache_b->enabled = new_b->enabled;
  3227. if (new_b->enabled) glEnable(GL_BLEND);
  3228. else glDisable(GL_BLEND);
  3229. }
  3230. if ((new_b->src_factor_rgb != cache_b->src_factor_rgb) ||
  3231. (new_b->dst_factor_rgb != cache_b->dst_factor_rgb) ||
  3232. (new_b->src_factor_alpha != cache_b->src_factor_alpha) ||
  3233. (new_b->dst_factor_alpha != cache_b->dst_factor_alpha))
  3234. {
  3235. cache_b->src_factor_rgb = new_b->src_factor_rgb;
  3236. cache_b->dst_factor_rgb = new_b->dst_factor_rgb;
  3237. cache_b->src_factor_alpha = new_b->src_factor_alpha;
  3238. cache_b->dst_factor_alpha = new_b->dst_factor_alpha;
  3239. glBlendFuncSeparate(_sg_gl_blend_factor(new_b->src_factor_rgb),
  3240. _sg_gl_blend_factor(new_b->dst_factor_rgb),
  3241. _sg_gl_blend_factor(new_b->src_factor_alpha),
  3242. _sg_gl_blend_factor(new_b->dst_factor_alpha));
  3243. }
  3244. if ((new_b->op_rgb != cache_b->op_rgb) || (new_b->op_alpha != cache_b->op_alpha)) {
  3245. cache_b->op_rgb = new_b->op_rgb;
  3246. cache_b->op_alpha = new_b->op_alpha;
  3247. glBlendEquationSeparate(_sg_gl_blend_op(new_b->op_rgb), _sg_gl_blend_op(new_b->op_alpha));
  3248. }
  3249. if (new_b->color_write_mask != cache_b->color_write_mask) {
  3250. cache_b->color_write_mask = new_b->color_write_mask;
  3251. glColorMask((new_b->color_write_mask & SG_COLORMASK_R) != 0,
  3252. (new_b->color_write_mask & SG_COLORMASK_G) != 0,
  3253. (new_b->color_write_mask & SG_COLORMASK_B) != 0,
  3254. (new_b->color_write_mask & SG_COLORMASK_A) != 0);
  3255. }
  3256. if (!_sg_fequal(new_b->blend_color[0], cache_b->blend_color[0], 0.0001f) ||
  3257. !_sg_fequal(new_b->blend_color[1], cache_b->blend_color[1], 0.0001f) ||
  3258. !_sg_fequal(new_b->blend_color[2], cache_b->blend_color[2], 0.0001f) ||
  3259. !_sg_fequal(new_b->blend_color[3], cache_b->blend_color[3], 0.0001f))
  3260. {
  3261. const float* bc = new_b->blend_color;
  3262. for (int i=0; i<4; i++) {
  3263. cache_b->blend_color[i] = bc[i];
  3264. }
  3265. glBlendColor(bc[0], bc[1], bc[2], bc[3]);
  3266. }
  3267. /* update rasterizer state */
  3268. const sg_rasterizer_state* new_r = &pip->rast;
  3269. sg_rasterizer_state* cache_r = &_sg_gl.cache.rast;
  3270. if (new_r->cull_mode != cache_r->cull_mode) {
  3271. cache_r->cull_mode = new_r->cull_mode;
  3272. if (SG_CULLMODE_NONE == new_r->cull_mode) {
  3273. glDisable(GL_CULL_FACE);
  3274. }
  3275. else {
  3276. glEnable(GL_CULL_FACE);
  3277. GLenum gl_mode = (SG_CULLMODE_FRONT == new_r->cull_mode) ? GL_FRONT : GL_BACK;
  3278. glCullFace(gl_mode);
  3279. }
  3280. }
  3281. if (new_r->face_winding != cache_r->face_winding) {
  3282. cache_r->face_winding = new_r->face_winding;
  3283. GLenum gl_winding = (SG_FACEWINDING_CW == new_r->face_winding) ? GL_CW : GL_CCW;
  3284. glFrontFace(gl_winding);
  3285. }
  3286. if (new_r->alpha_to_coverage_enabled != cache_r->alpha_to_coverage_enabled) {
  3287. cache_r->alpha_to_coverage_enabled = new_r->alpha_to_coverage_enabled;
  3288. if (new_r->alpha_to_coverage_enabled) glEnable(GL_SAMPLE_ALPHA_TO_COVERAGE);
  3289. else glDisable(GL_SAMPLE_ALPHA_TO_COVERAGE);
  3290. }
  3291. #ifdef SOKOL_GLCORE33
  3292. if (new_r->sample_count != cache_r->sample_count) {
  3293. cache_r->sample_count = new_r->sample_count;
  3294. if (new_r->sample_count > 1) glEnable(GL_MULTISAMPLE);
  3295. else glDisable(GL_MULTISAMPLE);
  3296. }
  3297. #endif
  3298. if (!_sg_fequal(new_r->depth_bias, cache_r->depth_bias, 0.000001f) ||
  3299. !_sg_fequal(new_r->depth_bias_slope_scale, cache_r->depth_bias_slope_scale, 0.000001f))
  3300. {
  3301. /* according to ANGLE's D3D11 backend:
  3302. D3D11 SlopeScaledDepthBias ==> GL polygonOffsetFactor
  3303. D3D11 DepthBias ==> GL polygonOffsetUnits
  3304. DepthBiasClamp has no meaning on GL
  3305. */
  3306. cache_r->depth_bias = new_r->depth_bias;
  3307. cache_r->depth_bias_slope_scale = new_r->depth_bias_slope_scale;
  3308. glPolygonOffset(new_r->depth_bias_slope_scale, new_r->depth_bias);
  3309. bool po_enabled = true;
  3310. if (_sg_fequal(new_r->depth_bias, 0.0f, 0.000001f) &&
  3311. _sg_fequal(new_r->depth_bias_slope_scale, 0.0f, 0.000001f))
  3312. {
  3313. po_enabled = false;
  3314. }
  3315. if (po_enabled != _sg_gl.cache.polygon_offset_enabled) {
  3316. _sg_gl.cache.polygon_offset_enabled = po_enabled;
  3317. if (po_enabled) glEnable(GL_POLYGON_OFFSET_FILL);
  3318. else glDisable(GL_POLYGON_OFFSET_FILL);
  3319. }
  3320. }
  3321. /* bind shader program */
  3322. glUseProgram(pip->shader->gl_prog);
  3323. }
  3324. /* bind textures */
  3325. _SG_GL_CHECK_ERROR();
  3326. for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
  3327. const _sg_shader_stage* stage = &pip->shader->stage[stage_index];
  3328. _sg_image** imgs = (stage_index == SG_SHADERSTAGE_VS)? vs_imgs : fs_imgs;
  3329. SOKOL_ASSERT(((stage_index == SG_SHADERSTAGE_VS)? num_vs_imgs : num_fs_imgs) == stage->num_images);
  3330. for (int img_index = 0; img_index < stage->num_images; img_index++) {
  3331. const _sg_shader_image* shd_img = &stage->images[img_index];
  3332. if (shd_img->gl_loc != -1) {
  3333. _sg_image* img = imgs[img_index];
  3334. const GLuint gl_tex = img->gl_tex[img->active_slot];
  3335. SOKOL_ASSERT(img && img->gl_target);
  3336. SOKOL_ASSERT((shd_img->gl_tex_slot != -1) && gl_tex);
  3337. glUniform1i(shd_img->gl_loc, shd_img->gl_tex_slot);
  3338. glActiveTexture(GL_TEXTURE0+shd_img->gl_tex_slot);
  3339. glBindTexture(img->gl_target, gl_tex);
  3340. }
  3341. }
  3342. }
  3343. _SG_GL_CHECK_ERROR();
  3344. /* index buffer (can be 0) */
  3345. const GLuint gl_ib = ib ? ib->gl_buf[ib->active_slot] : 0;
  3346. if (gl_ib != _sg_gl.cache.cur_gl_ib) {
  3347. _sg_gl.cache.cur_gl_ib = gl_ib;
  3348. glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, gl_ib);
  3349. }
  3350. /* vertex attributes */
  3351. GLuint gl_vb = 0;
  3352. for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  3353. _sg_gl_attr* attr = &pip->gl_attrs[attr_index];
  3354. _sg_gl_cache_attr* cache_attr = &_sg_gl.cache.attrs[attr_index];
  3355. bool cache_attr_dirty = false;
  3356. if (attr->vb_index >= 0) {
  3357. /* attribute is enabled */
  3358. SOKOL_ASSERT(attr->vb_index < num_vbs);
  3359. _sg_buffer* vb = vbs[attr->vb_index];
  3360. SOKOL_ASSERT(vb);
  3361. if ((vb->gl_buf[vb->active_slot] != cache_attr->gl_vbuf) ||
  3362. (attr->size != cache_attr->gl_attr.size) ||
  3363. (attr->type != cache_attr->gl_attr.type) ||
  3364. (attr->normalized != cache_attr->gl_attr.normalized) ||
  3365. (attr->stride != cache_attr->gl_attr.stride) ||
  3366. (attr->offset != cache_attr->gl_attr.offset))
  3367. {
  3368. if (gl_vb != vb->gl_buf[vb->active_slot]) {
  3369. gl_vb = vb->gl_buf[vb->active_slot];
  3370. glBindBuffer(GL_ARRAY_BUFFER, gl_vb);
  3371. }
  3372. glVertexAttribPointer(attr_index, attr->size, attr->type,
  3373. attr->normalized, attr->stride,
  3374. (const GLvoid*)(GLintptr)attr->offset);
  3375. cache_attr_dirty = true;
  3376. }
  3377. if (cache_attr->gl_attr.vb_index == -1) {
  3378. glEnableVertexAttribArray(attr_index);
  3379. cache_attr_dirty = true;
  3380. }
  3381. if (_sg_gl.features[SG_FEATURE_INSTANCING]) {
  3382. if (cache_attr->gl_attr.divisor != attr->divisor) {
  3383. glVertexAttribDivisor(attr_index, attr->divisor);
  3384. cache_attr_dirty = true;
  3385. }
  3386. }
  3387. }
  3388. else {
  3389. /* attribute is disabled */
  3390. if (cache_attr->gl_attr.vb_index != -1) {
  3391. glDisableVertexAttribArray(attr_index);
  3392. cache_attr_dirty = true;
  3393. }
  3394. }
  3395. if (cache_attr_dirty) {
  3396. cache_attr->gl_attr = *attr;
  3397. cache_attr->gl_vbuf = gl_vb;
  3398. }
  3399. }
  3400. _SG_GL_CHECK_ERROR();
  3401. }
  3402. _SOKOL_PRIVATE void _sg_apply_uniform_block(sg_shader_stage stage_index, int ub_index, const void* data, int num_bytes) {
  3403. SOKOL_ASSERT(data && (num_bytes > 0));
  3404. SOKOL_ASSERT((stage_index >= 0) && ((int)stage_index < SG_NUM_SHADER_STAGES));
  3405. SOKOL_ASSERT(_sg_gl.cache.cur_pipeline);
  3406. SOKOL_ASSERT(_sg_gl.cache.cur_pipeline->slot.id == _sg_gl.cache.cur_pipeline_id.id);
  3407. SOKOL_ASSERT(_sg_gl.cache.cur_pipeline->shader->slot.id == _sg_gl.cache.cur_pipeline->shader_id.id);
  3408. _sg_shader_stage* stage = &_sg_gl.cache.cur_pipeline->shader->stage[stage_index];
  3409. SOKOL_ASSERT(ub_index < stage->num_uniform_blocks);
  3410. _sg_uniform_block* ub = &stage->uniform_blocks[ub_index];
  3411. SOKOL_ASSERT(ub->size == num_bytes);
  3412. for (int u_index = 0; u_index < ub->num_uniforms; u_index++) {
  3413. _sg_uniform* u = &ub->uniforms[u_index];
  3414. SOKOL_ASSERT(u->type != SG_UNIFORMTYPE_INVALID);
  3415. if (u->gl_loc == -1) {
  3416. continue;
  3417. }
  3418. GLfloat* ptr = (GLfloat*) (((uint8_t*)data) + u->offset);
  3419. switch (u->type) {
  3420. case SG_UNIFORMTYPE_INVALID:
  3421. break;
  3422. case SG_UNIFORMTYPE_FLOAT:
  3423. glUniform1fv(u->gl_loc, u->count, ptr);
  3424. break;
  3425. case SG_UNIFORMTYPE_FLOAT2:
  3426. glUniform2fv(u->gl_loc, u->count, ptr);
  3427. break;
  3428. case SG_UNIFORMTYPE_FLOAT3:
  3429. glUniform3fv(u->gl_loc, u->count, ptr);
  3430. break;
  3431. case SG_UNIFORMTYPE_FLOAT4:
  3432. glUniform4fv(u->gl_loc, u->count, ptr);
  3433. break;
  3434. case SG_UNIFORMTYPE_MAT4:
  3435. glUniformMatrix4fv(u->gl_loc, u->count, GL_FALSE, ptr);
  3436. break;
  3437. default:
  3438. SOKOL_UNREACHABLE;
  3439. break;
  3440. }
  3441. }
  3442. }
  3443. _SOKOL_PRIVATE void _sg_draw(int base_element, int num_elements, int num_instances) {
  3444. const GLenum i_type = _sg_gl.cache.cur_index_type;
  3445. const GLenum p_type = _sg_gl.cache.cur_primitive_type;
  3446. if (0 != i_type) {
  3447. /* indexed rendering */
  3448. const int i_size = (i_type == GL_UNSIGNED_SHORT) ? 2 : 4;
  3449. const GLvoid* indices = (const GLvoid*)(GLintptr)(base_element*i_size);
  3450. if (num_instances == 1) {
  3451. glDrawElements(p_type, num_elements, i_type, indices);
  3452. }
  3453. else {
  3454. if (_sg_gl.features[SG_FEATURE_INSTANCING]) {
  3455. glDrawElementsInstanced(p_type, num_elements, i_type, indices, num_instances);
  3456. }
  3457. }
  3458. }
  3459. else {
  3460. /* non-indexed rendering */
  3461. if (num_instances == 1) {
  3462. glDrawArrays(p_type, base_element, num_elements);
  3463. }
  3464. else {
  3465. if (_sg_gl.features[SG_FEATURE_INSTANCING]) {
  3466. glDrawArraysInstanced(p_type, base_element, num_elements, num_instances);
  3467. }
  3468. }
  3469. }
  3470. }
/* End-of-frame commit; the GL backend has no per-frame work to flush,
   only verify that no pass is still open */
_SOKOL_PRIVATE void _sg_commit() {
SOKOL_ASSERT(!_sg_gl.in_pass);
}
  3474. _SOKOL_PRIVATE void _sg_update_buffer(_sg_buffer* buf, const void* data_ptr, int data_size) {
  3475. SOKOL_ASSERT(buf && data_ptr && (data_size > 0));
  3476. /* only one update per buffer per frame allowed */
  3477. if (++buf->active_slot >= buf->num_slots) {
  3478. buf->active_slot = 0;
  3479. }
  3480. GLenum gl_tgt = _sg_gl_buffer_target(buf->type);
  3481. SOKOL_ASSERT(buf->active_slot < SG_NUM_INFLIGHT_FRAMES);
  3482. GLuint gl_buf = buf->gl_buf[buf->active_slot];
  3483. SOKOL_ASSERT(gl_buf);
  3484. _SG_GL_CHECK_ERROR();
  3485. glBindBuffer(gl_tgt, gl_buf);
  3486. glBufferSubData(gl_tgt, 0, data_size, data_ptr);
  3487. _SG_GL_CHECK_ERROR();
  3488. }
/* Copy new pixel data into a dynamic/stream image. The image's active slot
   is advanced first (one update per image per frame), then every face and
   mipmap is re-uploaded with glTexSubImage2D/3D. */
_SOKOL_PRIVATE void _sg_update_image(_sg_image* img, const sg_image_content* data) {
SOKOL_ASSERT(img && data);
/* only one update per image per frame allowed */
if (++img->active_slot >= img->num_slots) {
img->active_slot = 0;
}
SOKOL_ASSERT(img->active_slot < SG_NUM_INFLIGHT_FRAMES);
SOKOL_ASSERT(0 != img->gl_tex[img->active_slot]);
glBindTexture(img->gl_target, img->gl_tex[img->active_slot]);
const GLenum gl_img_format = _sg_gl_teximage_format(img->pixel_format);
const GLenum gl_img_type = _sg_gl_teximage_type(img->pixel_format);
/* cubemaps upload 6 faces, all other image types one */
const int num_faces = img->type == SG_IMAGETYPE_CUBE ? 6 : 1;
const int num_mips = img->num_mipmaps;
for (int face_index = 0; face_index < num_faces; face_index++) {
for (int mip_index = 0; mip_index < num_mips; mip_index++) {
GLenum gl_img_target = img->gl_target;
if (SG_IMAGETYPE_CUBE == img->type) {
gl_img_target = _sg_gl_cubeface_target(face_index);
}
const GLvoid* data_ptr = data->subimage[face_index][mip_index].ptr;
/* mip dimensions are halved per level but clamped to at least 1 */
int mip_width = img->width >> mip_index;
if (mip_width == 0) {
mip_width = 1;
}
int mip_height = img->height >> mip_index;
if (mip_height == 0) {
mip_height = 1;
}
if ((SG_IMAGETYPE_2D == img->type) || (SG_IMAGETYPE_CUBE == img->type)) {
glTexSubImage2D(gl_img_target, mip_index,
0, 0,
mip_width, mip_height,
gl_img_format, gl_img_type,
data_ptr);
}
#if !defined(SOKOL_GLES2)
/* 3D and array textures need glTexSubImage3D, not available on GLES2 */
else if (!_sg_gl_gles2 && ((SG_IMAGETYPE_3D == img->type) || (SG_IMAGETYPE_ARRAY == img->type))) {
int mip_depth = img->depth >> mip_index;
if (mip_depth == 0) {
mip_depth = 1;
}
glTexSubImage3D(gl_img_target, mip_index,
0, 0, 0,
mip_width, mip_height, mip_depth,
gl_img_format, gl_img_type,
data_ptr);
}
#endif
}
}
}
/* Reset the GL state cache after external GL code may have changed GL
   state behind sokol's back; rebinds the global VAO on GL3/GLES3 first */
_SOKOL_PRIVATE void _sg_reset_state_cache() {
#if !defined(SOKOL_GLES2)
if (!_sg_gl_gles2) {
glBindVertexArray(_sg_gl.vao);
}
#endif
_sg_gl_reset_state_cache(&_sg_gl.cache);
}
  3548. #ifdef __cplusplus
  3549. } /* extern "C" */
  3550. #endif
  3551. /*== D3D11 BACKEND ===========================================================*/
  3552. #elif defined(SOKOL_D3D11)
  3553. #ifndef D3D11_NO_HELPERS
  3554. #define D3D11_NO_HELPERS
  3555. #endif
  3556. #ifndef CINTERFACE
  3557. #define CINTERFACE
  3558. #endif
  3559. #ifndef COBJMACROS
  3560. #define COBJMACROS
  3561. #endif
  3562. #ifndef WIN32_LEAN_AND_MEAN
  3563. #define WIN32_LEAN_AND_MEAN
  3564. #endif
  3565. #include <windows.h>
  3566. #include <d3d11.h>
  3567. #pragma comment (lib, "user32.lib")
  3568. #pragma comment (lib, "dxgi.lib")
  3569. #pragma comment (lib, "d3d11.lib")
  3570. #pragma comment (lib, "dxguid.lib")
  3571. #if defined(SOKOL_D3D11_SHADER_COMPILER)
  3572. #include <d3dcompiler.h>
  3573. #pragma comment (lib, "d3dcompiler.lib")
  3574. #endif
  3575. #ifdef __cplusplus
  3576. extern "C" {
  3577. #endif
  3578. /*-- enum translation functions ----------------------------------------------*/
  3579. _SOKOL_PRIVATE D3D11_USAGE _sg_d3d11_usage(sg_usage usg) {
  3580. switch (usg) {
  3581. case SG_USAGE_IMMUTABLE:
  3582. return D3D11_USAGE_IMMUTABLE;
  3583. case SG_USAGE_DYNAMIC:
  3584. case SG_USAGE_STREAM:
  3585. return D3D11_USAGE_DYNAMIC;
  3586. default:
  3587. SOKOL_UNREACHABLE;
  3588. return (D3D11_USAGE) 0;
  3589. }
  3590. }
  3591. _SOKOL_PRIVATE UINT _sg_d3d11_cpu_access_flags(sg_usage usg) {
  3592. switch (usg) {
  3593. case SG_USAGE_IMMUTABLE:
  3594. return 0;
  3595. case SG_USAGE_DYNAMIC:
  3596. case SG_USAGE_STREAM:
  3597. return D3D11_CPU_ACCESS_WRITE;
  3598. default:
  3599. SOKOL_UNREACHABLE;
  3600. return 0;
  3601. }
  3602. }
  3603. _SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_texture_format(sg_pixel_format fmt) {
  3604. /*
  3605. NOTE: the following pixel formats are only supported on D3D11.1
  3606. (we're running on D3D11.0):
  3607. DXGI_FORMAT_B4G4R4A4_UNORM
  3608. DXGI_FORMAT_B5G6R5_UNORM
  3609. DXGI_FORMAT_B5G5R5A1_UNORM
  3610. */
  3611. switch (fmt) {
  3612. case SG_PIXELFORMAT_RGBA8: return DXGI_FORMAT_R8G8B8A8_UNORM;
  3613. case SG_PIXELFORMAT_R10G10B10A2: return DXGI_FORMAT_R10G10B10A2_UNORM;
  3614. case SG_PIXELFORMAT_RGBA32F: return DXGI_FORMAT_R32G32B32A32_FLOAT;
  3615. case SG_PIXELFORMAT_RGBA16F: return DXGI_FORMAT_R16G16B16A16_FLOAT;
  3616. case SG_PIXELFORMAT_R32F: return DXGI_FORMAT_R32_FLOAT;
  3617. case SG_PIXELFORMAT_R16F: return DXGI_FORMAT_R16_FLOAT;
  3618. case SG_PIXELFORMAT_L8: return DXGI_FORMAT_R8_UNORM;
  3619. case SG_PIXELFORMAT_DXT1: return DXGI_FORMAT_BC1_UNORM;
  3620. case SG_PIXELFORMAT_DXT3: return DXGI_FORMAT_BC2_UNORM;
  3621. case SG_PIXELFORMAT_DXT5: return DXGI_FORMAT_BC3_UNORM;
  3622. default: return DXGI_FORMAT_UNKNOWN;
  3623. };
  3624. }
  3625. _SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_rendertarget_color_format(sg_pixel_format fmt) {
  3626. switch (fmt) {
  3627. case SG_PIXELFORMAT_RGBA8: return DXGI_FORMAT_R8G8B8A8_UNORM;
  3628. case SG_PIXELFORMAT_RGBA32F: return DXGI_FORMAT_R32G32B32A32_FLOAT;
  3629. case SG_PIXELFORMAT_RGBA16F: return DXGI_FORMAT_R16G16B16A16_FLOAT;
  3630. case SG_PIXELFORMAT_R32F: return DXGI_FORMAT_R32_FLOAT;
  3631. case SG_PIXELFORMAT_R16F: return DXGI_FORMAT_R16_FLOAT;
  3632. case SG_PIXELFORMAT_L8: return DXGI_FORMAT_R8_UNORM;
  3633. default: return DXGI_FORMAT_UNKNOWN;
  3634. }
  3635. }
  3636. _SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_rendertarget_depth_format(sg_pixel_format fmt) {
  3637. switch (fmt) {
  3638. case SG_PIXELFORMAT_DEPTH: return DXGI_FORMAT_D16_UNORM;
  3639. case SG_PIXELFORMAT_DEPTHSTENCIL: return DXGI_FORMAT_D24_UNORM_S8_UINT;
  3640. default: return DXGI_FORMAT_UNKNOWN;
  3641. }
  3642. }
  3643. _SOKOL_PRIVATE D3D11_PRIMITIVE_TOPOLOGY _sg_d3d11_primitive_topology(sg_primitive_type prim_type) {
  3644. switch (prim_type) {
  3645. case SG_PRIMITIVETYPE_POINTS: return D3D11_PRIMITIVE_TOPOLOGY_POINTLIST;
  3646. case SG_PRIMITIVETYPE_LINES: return D3D11_PRIMITIVE_TOPOLOGY_LINELIST;
  3647. case SG_PRIMITIVETYPE_LINE_STRIP: return D3D11_PRIMITIVE_TOPOLOGY_LINESTRIP;
  3648. case SG_PRIMITIVETYPE_TRIANGLES: return D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST;
  3649. case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP;
  3650. default: SOKOL_UNREACHABLE; return (D3D11_PRIMITIVE_TOPOLOGY) 0;
  3651. }
  3652. }
  3653. _SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_index_format(sg_index_type index_type) {
  3654. switch (index_type) {
  3655. case SG_INDEXTYPE_NONE: return DXGI_FORMAT_UNKNOWN;
  3656. case SG_INDEXTYPE_UINT16: return DXGI_FORMAT_R16_UINT;
  3657. case SG_INDEXTYPE_UINT32: return DXGI_FORMAT_R32_UINT;
  3658. default: SOKOL_UNREACHABLE; return (DXGI_FORMAT) 0;
  3659. }
  3660. }
  3661. _SOKOL_PRIVATE D3D11_FILTER _sg_d3d11_filter(sg_filter min_f, sg_filter mag_f, uint32_t max_anisotropy) {
  3662. if (max_anisotropy > 1) {
  3663. return D3D11_FILTER_ANISOTROPIC;
  3664. }
  3665. else if (mag_f == SG_FILTER_NEAREST) {
  3666. switch (min_f) {
  3667. case SG_FILTER_NEAREST:
  3668. case SG_FILTER_NEAREST_MIPMAP_NEAREST:
  3669. return D3D11_FILTER_MIN_MAG_MIP_POINT;
  3670. case SG_FILTER_LINEAR:
  3671. case SG_FILTER_LINEAR_MIPMAP_NEAREST:
  3672. return D3D11_FILTER_MIN_LINEAR_MAG_MIP_POINT;
  3673. case SG_FILTER_NEAREST_MIPMAP_LINEAR:
  3674. return D3D11_FILTER_MIN_MAG_POINT_MIP_LINEAR;
  3675. case SG_FILTER_LINEAR_MIPMAP_LINEAR:
  3676. return D3D11_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR;
  3677. default:
  3678. SOKOL_UNREACHABLE; break;
  3679. }
  3680. }
  3681. else if (mag_f == SG_FILTER_LINEAR) {
  3682. switch (min_f) {
  3683. case SG_FILTER_NEAREST:
  3684. case SG_FILTER_NEAREST_MIPMAP_NEAREST:
  3685. return D3D11_FILTER_MIN_POINT_MAG_LINEAR_MIP_POINT;
  3686. case SG_FILTER_LINEAR:
  3687. case SG_FILTER_LINEAR_MIPMAP_NEAREST:
  3688. return D3D11_FILTER_MIN_MAG_LINEAR_MIP_POINT;
  3689. case SG_FILTER_NEAREST_MIPMAP_LINEAR:
  3690. return D3D11_FILTER_MIN_POINT_MAG_MIP_LINEAR;
  3691. case SG_FILTER_LINEAR_MIPMAP_LINEAR:
  3692. return D3D11_FILTER_MIN_MAG_MIP_LINEAR;
  3693. default:
  3694. SOKOL_UNREACHABLE; break;
  3695. }
  3696. }
  3697. /* invalid value for mag filter */
  3698. SOKOL_UNREACHABLE;
  3699. return D3D11_FILTER_MIN_MAG_MIP_POINT;
  3700. }
  3701. _SOKOL_PRIVATE D3D11_TEXTURE_ADDRESS_MODE _sg_d3d11_address_mode(sg_wrap m) {
  3702. switch (m) {
  3703. case SG_WRAP_REPEAT: return D3D11_TEXTURE_ADDRESS_WRAP;
  3704. case SG_WRAP_CLAMP_TO_EDGE: return D3D11_TEXTURE_ADDRESS_CLAMP;
  3705. case SG_WRAP_MIRRORED_REPEAT: return D3D11_TEXTURE_ADDRESS_MIRROR;
  3706. default: SOKOL_UNREACHABLE; return (D3D11_TEXTURE_ADDRESS_MODE) 0;
  3707. }
  3708. }
  3709. _SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_vertex_format(sg_vertex_format fmt) {
  3710. switch (fmt) {
  3711. case SG_VERTEXFORMAT_FLOAT: return DXGI_FORMAT_R32_FLOAT;
  3712. case SG_VERTEXFORMAT_FLOAT2: return DXGI_FORMAT_R32G32_FLOAT;
  3713. case SG_VERTEXFORMAT_FLOAT3: return DXGI_FORMAT_R32G32B32_FLOAT;
  3714. case SG_VERTEXFORMAT_FLOAT4: return DXGI_FORMAT_R32G32B32A32_FLOAT;
  3715. case SG_VERTEXFORMAT_BYTE4: return DXGI_FORMAT_R8G8B8A8_SINT;
  3716. case SG_VERTEXFORMAT_BYTE4N: return DXGI_FORMAT_R8G8B8A8_SNORM;
  3717. case SG_VERTEXFORMAT_UBYTE4: return DXGI_FORMAT_R8G8B8A8_UINT;
  3718. case SG_VERTEXFORMAT_UBYTE4N: return DXGI_FORMAT_R8G8B8A8_UNORM;
  3719. case SG_VERTEXFORMAT_SHORT2: return DXGI_FORMAT_R16G16_SINT;
  3720. case SG_VERTEXFORMAT_SHORT2N: return DXGI_FORMAT_R16G16_SNORM;
  3721. case SG_VERTEXFORMAT_SHORT4: return DXGI_FORMAT_R16G16B16A16_SINT;
  3722. case SG_VERTEXFORMAT_SHORT4N: return DXGI_FORMAT_R16G16B16A16_SNORM;
  3723. /* FIXME: signed 10-10-10-2 vertex format not supported on d3d11 (only unsigned) */
  3724. default: SOKOL_UNREACHABLE; return (DXGI_FORMAT) 0;
  3725. }
  3726. }
  3727. _SOKOL_PRIVATE D3D11_INPUT_CLASSIFICATION _sg_d3d11_input_classification(sg_vertex_step step) {
  3728. switch (step) {
  3729. case SG_VERTEXSTEP_PER_VERTEX: return D3D11_INPUT_PER_VERTEX_DATA;
  3730. case SG_VERTEXSTEP_PER_INSTANCE: return D3D11_INPUT_PER_INSTANCE_DATA;
  3731. default: SOKOL_UNREACHABLE; return (D3D11_INPUT_CLASSIFICATION) 0;
  3732. }
  3733. }
  3734. _SOKOL_PRIVATE D3D11_CULL_MODE _sg_d3d11_cull_mode(sg_cull_mode m) {
  3735. switch (m) {
  3736. case SG_CULLMODE_NONE: return D3D11_CULL_NONE;
  3737. case SG_CULLMODE_FRONT: return D3D11_CULL_FRONT;
  3738. case SG_CULLMODE_BACK: return D3D11_CULL_BACK;
  3739. default: SOKOL_UNREACHABLE; return (D3D11_CULL_MODE) 0;
  3740. }
  3741. }
  3742. _SOKOL_PRIVATE D3D11_COMPARISON_FUNC _sg_d3d11_compare_func(sg_compare_func f) {
  3743. switch (f) {
  3744. case SG_COMPAREFUNC_NEVER: return D3D11_COMPARISON_NEVER;
  3745. case SG_COMPAREFUNC_LESS: return D3D11_COMPARISON_LESS;
  3746. case SG_COMPAREFUNC_EQUAL: return D3D11_COMPARISON_EQUAL;
  3747. case SG_COMPAREFUNC_LESS_EQUAL: return D3D11_COMPARISON_LESS_EQUAL;
  3748. case SG_COMPAREFUNC_GREATER: return D3D11_COMPARISON_GREATER;
  3749. case SG_COMPAREFUNC_NOT_EQUAL: return D3D11_COMPARISON_NOT_EQUAL;
  3750. case SG_COMPAREFUNC_GREATER_EQUAL: return D3D11_COMPARISON_GREATER_EQUAL;
  3751. case SG_COMPAREFUNC_ALWAYS: return D3D11_COMPARISON_ALWAYS;
  3752. default: SOKOL_UNREACHABLE; return (D3D11_COMPARISON_FUNC) 0;
  3753. }
  3754. }
  3755. _SOKOL_PRIVATE D3D11_STENCIL_OP _sg_d3d11_stencil_op(sg_stencil_op op) {
  3756. switch (op) {
  3757. case SG_STENCILOP_KEEP: return D3D11_STENCIL_OP_KEEP;
  3758. case SG_STENCILOP_ZERO: return D3D11_STENCIL_OP_ZERO;
  3759. case SG_STENCILOP_REPLACE: return D3D11_STENCIL_OP_REPLACE;
  3760. case SG_STENCILOP_INCR_CLAMP: return D3D11_STENCIL_OP_INCR_SAT;
  3761. case SG_STENCILOP_DECR_CLAMP: return D3D11_STENCIL_OP_DECR_SAT;
  3762. case SG_STENCILOP_INVERT: return D3D11_STENCIL_OP_INVERT;
  3763. case SG_STENCILOP_INCR_WRAP: return D3D11_STENCIL_OP_INCR;
  3764. case SG_STENCILOP_DECR_WRAP: return D3D11_STENCIL_OP_DECR;
  3765. default: SOKOL_UNREACHABLE; return (D3D11_STENCIL_OP) 0;
  3766. }
  3767. }
  3768. _SOKOL_PRIVATE D3D11_BLEND _sg_d3d11_blend_factor(sg_blend_factor f) {
  3769. switch (f) {
  3770. case SG_BLENDFACTOR_ZERO: return D3D11_BLEND_ZERO;
  3771. case SG_BLENDFACTOR_ONE: return D3D11_BLEND_ONE;
  3772. case SG_BLENDFACTOR_SRC_COLOR: return D3D11_BLEND_SRC_COLOR;
  3773. case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return D3D11_BLEND_INV_SRC_COLOR;
  3774. case SG_BLENDFACTOR_SRC_ALPHA: return D3D11_BLEND_SRC_ALPHA;
  3775. case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return D3D11_BLEND_INV_SRC_ALPHA;
  3776. case SG_BLENDFACTOR_DST_COLOR: return D3D11_BLEND_DEST_COLOR;
  3777. case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return D3D11_BLEND_INV_DEST_COLOR;
  3778. case SG_BLENDFACTOR_DST_ALPHA: return D3D11_BLEND_DEST_ALPHA;
  3779. case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return D3D11_BLEND_INV_DEST_ALPHA;
  3780. case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return D3D11_BLEND_SRC_ALPHA_SAT;
  3781. case SG_BLENDFACTOR_BLEND_COLOR: return D3D11_BLEND_BLEND_FACTOR;
  3782. case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return D3D11_BLEND_INV_BLEND_FACTOR;
  3783. case SG_BLENDFACTOR_BLEND_ALPHA: return D3D11_BLEND_BLEND_FACTOR;
  3784. case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return D3D11_BLEND_INV_BLEND_FACTOR;
  3785. default: SOKOL_UNREACHABLE; return (D3D11_BLEND) 0;
  3786. }
  3787. }
  3788. _SOKOL_PRIVATE D3D11_BLEND_OP _sg_d3d11_blend_op(sg_blend_op op) {
  3789. switch (op) {
  3790. case SG_BLENDOP_ADD: return D3D11_BLEND_OP_ADD;
  3791. case SG_BLENDOP_SUBTRACT: return D3D11_BLEND_OP_SUBTRACT;
  3792. case SG_BLENDOP_REVERSE_SUBTRACT: return D3D11_BLEND_OP_REV_SUBTRACT;
  3793. default: SOKOL_UNREACHABLE; return (D3D11_BLEND_OP) 0;
  3794. }
  3795. }
  3796. _SOKOL_PRIVATE UINT8 _sg_d3d11_color_write_mask(sg_color_mask m) {
  3797. UINT8 res = 0;
  3798. if (m & SG_COLORMASK_R) {
  3799. res |= D3D11_COLOR_WRITE_ENABLE_RED;
  3800. }
  3801. if (m & SG_COLORMASK_G) {
  3802. res |= D3D11_COLOR_WRITE_ENABLE_GREEN;
  3803. }
  3804. if (m & SG_COLORMASK_B) {
  3805. res |= D3D11_COLOR_WRITE_ENABLE_BLUE;
  3806. }
  3807. if (m & SG_COLORMASK_A) {
  3808. res |= D3D11_COLOR_WRITE_ENABLE_ALPHA;
  3809. }
  3810. return res;
  3811. }
  3812. /*-- backend resource structures ---------------------------------------------*/
/* D3D11 backend buffer resource */
typedef struct {
_sg_slot slot;                  /* resource-pool slot (id and state) */
int size;                       /* buffer size in bytes */
sg_buffer_type type;            /* vertex- or index-buffer */
sg_usage usage;                 /* immutable/dynamic/stream */
uint32_t upd_frame_index;       /* frame index of last update (once-per-frame guard) */
ID3D11Buffer* d3d11_buf;        /* the D3D11 buffer object */
} _sg_buffer;
  3821. _SOKOL_PRIVATE void _sg_init_buffer(_sg_buffer* buf) {
  3822. SOKOL_ASSERT(buf);
  3823. memset(buf, 0, sizeof(_sg_buffer));
  3824. }
/* D3D11 backend image resource (texture + sampler state) */
typedef struct {
_sg_slot slot;                  /* resource-pool slot (id and state) */
sg_image_type type;             /* 2D, cube, 3D or array */
bool render_target;             /* image is usable as pass attachment */
int width;                      /* width in pixels */
int height;                     /* height in pixels */
int depth;                      /* depth/number of slices */
int num_mipmaps;
sg_usage usage;                 /* immutable/dynamic/stream */
sg_pixel_format pixel_format;
int sample_count;               /* MSAA sample count */
/* sampler-state creation parameters */
sg_filter min_filter;
sg_filter mag_filter;
sg_wrap wrap_u;
sg_wrap wrap_v;
sg_wrap wrap_w;
uint32_t max_anisotropy;
int upd_frame_index;            /* frame index of last update (once-per-frame guard) */
DXGI_FORMAT d3d11_format;
ID3D11Texture2D* d3d11_tex2d;   /* texture object for 2D/cube images */
ID3D11Texture3D* d3d11_tex3d;   /* texture object for 3D images */
ID3D11Texture2D* d3d11_texds;   /* depth-stencil texture (per name — TODO confirm) */
ID3D11Texture2D* d3d11_texmsaa; /* multisample texture (per name — TODO confirm) */
ID3D11ShaderResourceView* d3d11_srv;
ID3D11SamplerState* d3d11_smp;
} _sg_image;
  3851. _SOKOL_PRIVATE void _sg_init_image(_sg_image* img) {
  3852. SOKOL_ASSERT(img);
  3853. memset(img, 0, sizeof(_sg_image));
  3854. }
/* description of one uniform block on a shader stage */
typedef struct {
    int size;           /* uniform block size in bytes */
} _sg_uniform_block;

/* description of one image slot on a shader stage */
typedef struct {
    sg_image_type type; /* expected image type for this slot */
} _sg_shader_image;

/* per-shader-stage state: uniform blocks, image slots and their D3D11 constant buffers */
typedef struct {
    int num_uniform_blocks;
    int num_images;
    _sg_uniform_block uniform_blocks[SG_MAX_SHADERSTAGE_UBS];
    _sg_shader_image images[SG_MAX_SHADERSTAGE_IMAGES];
    ID3D11Buffer* d3d11_cbs[SG_MAX_SHADERSTAGE_UBS];    /* one D3D11 constant buffer per uniform block */
} _sg_shader_stage;
/* D3D11 backend state for an sg_shader resource */
typedef struct {
    _sg_slot slot;                              /* resource pool slot (id + resource state) */
    _sg_shader_stage stage[SG_NUM_SHADER_STAGES];   /* vertex- and fragment-stage state */
    ID3D11VertexShader* d3d11_vs;               /* compiled vertex shader object */
    ID3D11PixelShader* d3d11_fs;                /* compiled pixel shader object */
    void* d3d11_vs_blob;                        /* copy of vertex-shader byte code, needed later for input-layout creation in _sg_create_pipeline */
    int d3d11_vs_blob_length;                   /* byte size of d3d11_vs_blob */
} _sg_shader;
  3876. _SOKOL_PRIVATE void _sg_init_shader(_sg_shader* shd) {
  3877. SOKOL_ASSERT(shd);
  3878. memset(shd, 0, sizeof(_sg_shader));
  3879. }
/* D3D11 backend state for an sg_pipeline resource */
typedef struct {
    _sg_slot slot;                          /* resource pool slot (id + resource state) */
    _sg_shader* shader;                     /* pointer to the pipeline's shader */
    sg_shader shader_id;                    /* shader id, to validate the pointer above */
    sg_index_type index_type;               /* none, 16- or 32-bit indices */
    bool vertex_layout_valid[SG_MAX_SHADERSTAGE_BUFFERS];   /* which vertex-buffer bind slots are used by the layout */
    int color_attachment_count;             /* number of color attachments this pipeline renders to */
    sg_pixel_format color_format;           /* expected color-attachment pixel format */
    sg_pixel_format depth_format;           /* expected depth-attachment pixel format */
    int sample_count;                       /* expected MSAA sample count */
    float blend_color[4];                   /* constant blend color (for SG_BLENDFACTOR_BLEND_* factors) */
    UINT d3d11_stencil_ref;                 /* stencil reference value for OMSetDepthStencilState */
    UINT d3d11_vb_strides[SG_MAX_SHADERSTAGE_BUFFERS];  /* per-slot vertex-buffer strides in bytes */
    D3D_PRIMITIVE_TOPOLOGY d3d11_topology;  /* resolved primitive topology */
    DXGI_FORMAT d3d11_index_format;         /* resolved index-buffer format */
    ID3D11InputLayout* d3d11_il;            /* vertex input-layout object */
    ID3D11RasterizerState* d3d11_rs;        /* rasterizer-state object */
    ID3D11DepthStencilState* d3d11_dss;     /* depth-stencil-state object */
    ID3D11BlendState* d3d11_bs;             /* blend-state object */
} _sg_pipeline;
  3900. _SOKOL_PRIVATE void _sg_init_pipeline(_sg_pipeline* pip) {
  3901. SOKOL_ASSERT(pip);
  3902. memset(pip, 0, sizeof(_sg_pipeline));
  3903. }
/* a single render-pass attachment (references an image plus mip/slice selection) */
typedef struct {
    _sg_image* image;       /* pointer to the attached image */
    sg_image image_id;      /* image id, to validate the pointer above */
    int mip_level;          /* rendered-to mipmap level */
    int slice;              /* rendered-to slice (cube face, array layer or 3D depth slice) */
} _sg_attachment;

/* D3D11 backend state for an sg_pass resource */
typedef struct {
    _sg_slot slot;          /* resource pool slot (id + resource state) */
    int num_color_atts;     /* number of valid color attachments */
    _sg_attachment color_atts[SG_MAX_COLOR_ATTACHMENTS];
    _sg_attachment ds_att;  /* optional depth-stencil attachment */
    ID3D11RenderTargetView* d3d11_rtvs[SG_MAX_COLOR_ATTACHMENTS];   /* one RTV per color attachment */
    ID3D11DepthStencilView* d3d11_dsv;                              /* DSV for the depth-stencil attachment */
} _sg_pass;
  3918. _SOKOL_PRIVATE void _sg_init_pass(_sg_pass* pass) {
  3919. SOKOL_ASSERT(pass);
  3920. memset(pass, 0, sizeof(_sg_pass));
  3921. }
/*-- main D3D11 backend state and functions ----------------------------------*/
typedef struct {
    bool valid;                         /* true between _sg_setup_backend and _sg_discard_backend */
    ID3D11Device* dev;                  /* the externally-provided D3D11 device (not owned) */
    ID3D11DeviceContext* ctx;           /* the externally-provided immediate device context (not owned) */
    const void* (*rtv_cb)(void);        /* callback returning the default framebuffer's render-target view */
    const void* (*dsv_cb)(void);        /* callback returning the default framebuffer's depth-stencil view */
    bool in_pass;                       /* true between begin-pass and end-pass */
    bool use_indexed_draw;              /* presumably set per applied pipeline to select Draw vs DrawIndexed — confirm in the draw functions */
    int cur_width;                      /* width of the current pass's render target */
    int cur_height;                     /* height of the current pass's render target */
    int num_rtvs;                       /* number of currently bound render-target views */
    _sg_pass* cur_pass;                 /* currently active pass (null for default pass) */
    sg_pass cur_pass_id;                /* id of current pass, to validate the pointer above */
    _sg_pipeline* cur_pipeline;         /* currently applied pipeline */
    sg_pipeline cur_pipeline_id;        /* id of current pipeline, to validate the pointer above */
    ID3D11RenderTargetView* cur_rtvs[SG_MAX_COLOR_ATTACHMENTS];     /* currently bound RTVs */
    ID3D11DepthStencilView* cur_dsv;    /* currently bound DSV */
    /* the following arrays are used for unbinding resources, they will always contain zeroes */
    ID3D11RenderTargetView* zero_rtvs[SG_MAX_COLOR_ATTACHMENTS];
    ID3D11Buffer* zero_vbs[SG_MAX_SHADERSTAGE_BUFFERS];
    UINT zero_vb_offsets[SG_MAX_SHADERSTAGE_BUFFERS];
    UINT zero_vb_strides[SG_MAX_SHADERSTAGE_BUFFERS];
    ID3D11Buffer* zero_cbs[SG_MAX_SHADERSTAGE_UBS];
    ID3D11ShaderResourceView* zero_srvs[SG_MAX_SHADERSTAGE_IMAGES];
    ID3D11SamplerState* zero_smps[SG_MAX_SHADERSTAGE_IMAGES];
    /* global subresourcedata array for texture updates */
    D3D11_SUBRESOURCE_DATA subres_data[SG_MAX_MIPMAPS * SG_MAX_TEXTUREARRAY_LAYERS];
} _sg_backend;

/* the single global D3D11 backend state instance */
static _sg_backend _sg_d3d11;
  3952. _SOKOL_PRIVATE void _sg_setup_backend(const sg_desc* desc) {
  3953. SOKOL_ASSERT(desc);
  3954. SOKOL_ASSERT(desc->d3d11_device);
  3955. SOKOL_ASSERT(desc->d3d11_device_context);
  3956. SOKOL_ASSERT(desc->d3d11_render_target_view_cb);
  3957. SOKOL_ASSERT(desc->d3d11_depth_stencil_view_cb);
  3958. SOKOL_ASSERT(desc->d3d11_render_target_view_cb != desc->d3d11_depth_stencil_view_cb);
  3959. memset(&_sg_d3d11, 0, sizeof(_sg_d3d11));
  3960. _sg_d3d11.valid = true;
  3961. _sg_d3d11.dev = (ID3D11Device*) desc->d3d11_device;
  3962. _sg_d3d11.ctx = (ID3D11DeviceContext*) desc->d3d11_device_context;
  3963. _sg_d3d11.rtv_cb = desc->d3d11_render_target_view_cb;
  3964. _sg_d3d11.dsv_cb = desc->d3d11_depth_stencil_view_cb;
  3965. }
  3966. _SOKOL_PRIVATE void _sg_discard_backend() {
  3967. SOKOL_ASSERT(_sg_d3d11.valid);
  3968. memset(&_sg_d3d11, 0, sizeof(_sg_d3d11));
  3969. }
  3970. _SOKOL_PRIVATE bool _sg_query_feature(sg_feature f) {
  3971. switch (f) {
  3972. case SG_FEATURE_INSTANCING:
  3973. case SG_FEATURE_TEXTURE_COMPRESSION_DXT:
  3974. case SG_FEATURE_TEXTURE_FLOAT:
  3975. case SG_FEATURE_TEXTURE_HALF_FLOAT:
  3976. case SG_FEATURE_ORIGIN_TOP_LEFT:
  3977. case SG_FEATURE_MSAA_RENDER_TARGETS:
  3978. case SG_FEATURE_MULTIPLE_RENDER_TARGET:
  3979. case SG_FEATURE_IMAGETYPE_3D:
  3980. case SG_FEATURE_IMAGETYPE_ARRAY:
  3981. return true;
  3982. default:
  3983. return false;
  3984. }
  3985. }
  3986. _SOKOL_PRIVATE void _sg_d3d11_clear_state() {
  3987. /* clear all the device context state, so that resource refs don't keep stuck in the d3d device context */
  3988. ID3D11DeviceContext_OMSetRenderTargets(_sg_d3d11.ctx, SG_MAX_COLOR_ATTACHMENTS, _sg_d3d11.zero_rtvs, NULL);
  3989. ID3D11DeviceContext_RSSetState(_sg_d3d11.ctx, NULL);
  3990. ID3D11DeviceContext_OMSetDepthStencilState(_sg_d3d11.ctx, NULL, 0);
  3991. ID3D11DeviceContext_OMSetBlendState(_sg_d3d11.ctx, NULL, NULL, 0xFFFFFFFF);
  3992. ID3D11DeviceContext_IASetVertexBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_BUFFERS, _sg_d3d11.zero_vbs, _sg_d3d11.zero_vb_strides, _sg_d3d11.zero_vb_offsets);
  3993. ID3D11DeviceContext_IASetIndexBuffer(_sg_d3d11.ctx, NULL, DXGI_FORMAT_UNKNOWN, 0);
  3994. ID3D11DeviceContext_IASetInputLayout(_sg_d3d11.ctx, NULL);
  3995. ID3D11DeviceContext_VSSetShader(_sg_d3d11.ctx, NULL, NULL, 0);
  3996. ID3D11DeviceContext_PSSetShader(_sg_d3d11.ctx, NULL, NULL, 0);
  3997. ID3D11DeviceContext_VSSetConstantBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_UBS, _sg_d3d11.zero_cbs);
  3998. ID3D11DeviceContext_PSSetConstantBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_UBS, _sg_d3d11.zero_cbs);
  3999. ID3D11DeviceContext_VSSetShaderResources(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, _sg_d3d11.zero_srvs);
  4000. ID3D11DeviceContext_PSSetShaderResources(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, _sg_d3d11.zero_srvs);
  4001. ID3D11DeviceContext_VSSetSamplers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, _sg_d3d11.zero_smps);
  4002. ID3D11DeviceContext_PSSetSamplers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, _sg_d3d11.zero_smps);
  4003. }
/* create a D3D11 buffer resource; either wraps an externally-provided
   (injected) ID3D11Buffer, or creates a new vertex- or index-buffer;
   immutable buffers must provide their initial content in desc->content */
_SOKOL_PRIVATE void _sg_create_buffer(_sg_buffer* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(!buf->d3d11_buf);
    /* resolve defaults for zero-initialized desc fields */
    buf->size = desc->size;
    buf->type = _sg_def(desc->type, SG_BUFFERTYPE_VERTEXBUFFER);
    buf->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    buf->upd_frame_index = 0;
    /* a non-null d3d11_buffer in the desc means an externally-created buffer is injected */
    const bool injected = (0 != desc->d3d11_buffer);
    if (injected) {
        buf->d3d11_buf = (ID3D11Buffer*) desc->d3d11_buffer;
        /* AddRef so destroy can unconditionally Release (caller keeps its own reference) */
        ID3D11Buffer_AddRef(buf->d3d11_buf);
    }
    else {
        D3D11_BUFFER_DESC d3d11_desc;
        memset(&d3d11_desc, 0, sizeof(d3d11_desc));
        d3d11_desc.ByteWidth = buf->size;
        d3d11_desc.Usage = _sg_d3d11_usage(buf->usage);
        d3d11_desc.BindFlags = buf->type == SG_BUFFERTYPE_VERTEXBUFFER ? D3D11_BIND_VERTEX_BUFFER : D3D11_BIND_INDEX_BUFFER;
        d3d11_desc.CPUAccessFlags = _sg_d3d11_cpu_access_flags(buf->usage);
        /* immutable buffers must be created with their initial content */
        D3D11_SUBRESOURCE_DATA* init_data_ptr = 0;
        D3D11_SUBRESOURCE_DATA init_data;
        memset(&init_data, 0, sizeof(init_data));
        if (buf->usage == SG_USAGE_IMMUTABLE) {
            SOKOL_ASSERT(desc->content);
            init_data.pSysMem = desc->content;
            init_data_ptr = &init_data;
        }
        HRESULT hr = ID3D11Device_CreateBuffer(_sg_d3d11.dev, &d3d11_desc, init_data_ptr, &buf->d3d11_buf);
        SOKOL_ASSERT(SUCCEEDED(hr) && buf->d3d11_buf);
    }
    buf->slot.state = SG_RESOURCESTATE_VALID;
}
  4037. _SOKOL_PRIVATE void _sg_destroy_buffer(_sg_buffer* buf) {
  4038. SOKOL_ASSERT(buf);
  4039. if (buf->d3d11_buf) {
  4040. ID3D11Buffer_Release(buf->d3d11_buf);
  4041. }
  4042. _sg_init_buffer(buf);
  4043. }
  4044. _SOKOL_PRIVATE void _sg_d3d11_fill_subres_data(const _sg_image* img, const sg_image_content* content) {
  4045. const int num_faces = (img->type == SG_IMAGETYPE_CUBE) ? 6:1;
  4046. const int num_slices = (img->type == SG_IMAGETYPE_ARRAY) ? img->depth:1;
  4047. int subres_index = 0;
  4048. for (int face_index = 0; face_index < num_faces; face_index++) {
  4049. for (int slice_index = 0; slice_index < num_slices; slice_index++) {
  4050. for (int mip_index = 0; mip_index < img->num_mipmaps; mip_index++, subres_index++) {
  4051. SOKOL_ASSERT(subres_index < (SG_MAX_MIPMAPS * SG_MAX_TEXTUREARRAY_LAYERS));
  4052. D3D11_SUBRESOURCE_DATA* subres_data = &_sg_d3d11.subres_data[subres_index];
  4053. const int mip_width = ((img->width>>mip_index)>0) ? img->width>>mip_index : 1;
  4054. const int mip_height = ((img->height>>mip_index)>0) ? img->height>>mip_index : 1;
  4055. const sg_subimage_content* subimg_content = &(content->subimage[face_index][mip_index]);
  4056. const int slice_size = subimg_content->size / num_slices;
  4057. const int slice_offset = slice_size * slice_index;
  4058. const uint8_t* ptr = (const uint8_t*) subimg_content->ptr;
  4059. subres_data->pSysMem = ptr + slice_offset;
  4060. subres_data->SysMemPitch = _sg_row_pitch(img->pixel_format, mip_width);
  4061. if (img->type == SG_IMAGETYPE_3D) {
  4062. const int mip_depth = ((img->depth>>mip_index)>0) ? img->depth>>mip_index : 1;
  4063. subres_data->SysMemSlicePitch = _sg_surface_pitch(img->pixel_format, mip_width, mip_height);
  4064. }
  4065. else {
  4066. subres_data->SysMemSlicePitch = 0;
  4067. }
  4068. }
  4069. }
  4070. }
  4071. }
/* create a D3D11 image resource: depending on the desc this is either a
   pure depth-stencil texture, a 2D/cube/array texture (optionally with a
   separate MSAA texture), or a 3D texture; non-depth images also get a
   shader-resource view and a sampler-state object; an externally-created
   texture can be injected via desc->d3d11_texture */
_SOKOL_PRIVATE void _sg_create_image(_sg_image* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(!img->d3d11_tex2d && !img->d3d11_tex3d && !img->d3d11_texds && !img->d3d11_texmsaa);
    SOKOL_ASSERT(!img->d3d11_srv && !img->d3d11_smp);
    HRESULT hr;
    /* resolve defaults for zero-initialized desc fields */
    img->type = _sg_def(desc->type, SG_IMAGETYPE_2D);
    img->render_target = desc->render_target;
    img->width = desc->width;
    img->height = desc->height;
    img->depth = _sg_def(desc->depth, 1);
    img->num_mipmaps = _sg_def(desc->num_mipmaps, 1);
    img->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    img->pixel_format = _sg_def(desc->pixel_format, SG_PIXELFORMAT_RGBA8);
    img->sample_count = _sg_def(desc->sample_count, 1);
    img->min_filter = _sg_def(desc->min_filter, SG_FILTER_NEAREST);
    img->mag_filter = _sg_def(desc->mag_filter, SG_FILTER_NEAREST);
    img->wrap_u = _sg_def(desc->wrap_u, SG_WRAP_REPEAT);
    img->wrap_v = _sg_def(desc->wrap_v, SG_WRAP_REPEAT);
    img->wrap_w = _sg_def(desc->wrap_w, SG_WRAP_REPEAT);
    img->max_anisotropy = _sg_def(desc->max_anisotropy, 1);
    img->upd_frame_index = 0;
    /* a non-null d3d11_texture in the desc means an externally-created texture is injected */
    const bool injected = (0 != desc->d3d11_texture);
    /* special case depth-stencil buffer? */
    if (_sg_is_valid_rendertarget_depth_format(img->pixel_format)) {
        /* create only a depth-texture (no SRV, no sampler; injection not supported here) */
        SOKOL_ASSERT(!injected);
        img->d3d11_format = _sg_d3d11_rendertarget_depth_format(img->pixel_format);
        if (img->d3d11_format == DXGI_FORMAT_UNKNOWN) {
            /* trying to create a texture format that's not supported by D3D */
            SOKOL_LOG("trying to create a D3D11 depth-texture with unsupported pixel format\n");
            img->slot.state = SG_RESOURCESTATE_FAILED;
            return;
        }
        D3D11_TEXTURE2D_DESC d3d11_desc;
        memset(&d3d11_desc, 0, sizeof(d3d11_desc));
        d3d11_desc.Width = img->width;
        d3d11_desc.Height = img->height;
        d3d11_desc.MipLevels = 1;
        d3d11_desc.ArraySize = 1;
        d3d11_desc.Format = img->d3d11_format;
        d3d11_desc.Usage = D3D11_USAGE_DEFAULT;
        d3d11_desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
        d3d11_desc.SampleDesc.Count = img->sample_count;
        d3d11_desc.SampleDesc.Quality = (img->sample_count > 1) ? D3D11_STANDARD_MULTISAMPLE_PATTERN : 0;
        hr = ID3D11Device_CreateTexture2D(_sg_d3d11.dev, &d3d11_desc, NULL, &img->d3d11_texds);
        SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_texds);
    }
    else {
        /* create (or inject) color texture */
        /* prepare initial content pointers (only for non-injected immutable non-render-target images) */
        D3D11_SUBRESOURCE_DATA* init_data = 0;
        if (!injected && (img->usage == SG_USAGE_IMMUTABLE) && !img->render_target) {
            _sg_d3d11_fill_subres_data(img, &desc->content);
            init_data = _sg_d3d11.subres_data;
        }
        if (img->type != SG_IMAGETYPE_3D) {
            /* 2D-, cube- or array-texture */
            /* if this is an MSAA render target, the following texture will be the 'resolve-texture' */
            D3D11_TEXTURE2D_DESC d3d11_tex_desc;
            memset(&d3d11_tex_desc, 0, sizeof(d3d11_tex_desc));
            d3d11_tex_desc.Width = img->width;
            d3d11_tex_desc.Height = img->height;
            d3d11_tex_desc.MipLevels = img->num_mipmaps;
            switch (img->type) {
                case SG_IMAGETYPE_ARRAY: d3d11_tex_desc.ArraySize = img->depth; break;
                case SG_IMAGETYPE_CUBE: d3d11_tex_desc.ArraySize = 6; break;
                default: d3d11_tex_desc.ArraySize = 1; break;
            }
            d3d11_tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
            if (img->render_target) {
                img->d3d11_format = _sg_d3d11_rendertarget_color_format(img->pixel_format);
                d3d11_tex_desc.Format = img->d3d11_format;
                d3d11_tex_desc.Usage = D3D11_USAGE_DEFAULT;
                /* with MSAA, rendering goes into the separate MSAA texture below,
                   so the resolve texture only needs the render-target bind flag
                   in the non-MSAA case */
                if (img->sample_count == 1) {
                    d3d11_tex_desc.BindFlags |= D3D11_BIND_RENDER_TARGET;
                }
                d3d11_tex_desc.CPUAccessFlags = 0;
            }
            else {
                img->d3d11_format = _sg_d3d11_texture_format(img->pixel_format);
                d3d11_tex_desc.Format = img->d3d11_format;
                d3d11_tex_desc.Usage = _sg_d3d11_usage(img->usage);
                d3d11_tex_desc.CPUAccessFlags = _sg_d3d11_cpu_access_flags(img->usage);
            }
            if (img->d3d11_format == DXGI_FORMAT_UNKNOWN) {
                /* trying to create a texture format that's not supported by D3D */
                SOKOL_LOG("trying to create a D3D11 texture with unsupported pixel format\n");
                img->slot.state = SG_RESOURCESTATE_FAILED;
                return;
            }
            d3d11_tex_desc.SampleDesc.Count = 1;
            d3d11_tex_desc.SampleDesc.Quality = 0;
            d3d11_tex_desc.MiscFlags = (img->type == SG_IMAGETYPE_CUBE) ? D3D11_RESOURCE_MISC_TEXTURECUBE : 0;
            if (injected) {
                img->d3d11_tex2d = (ID3D11Texture2D*) desc->d3d11_texture;
                /* AddRef so destroy can unconditionally Release */
                ID3D11Texture2D_AddRef(img->d3d11_tex2d);
            }
            else {
                hr = ID3D11Device_CreateTexture2D(_sg_d3d11.dev, &d3d11_tex_desc, init_data, &img->d3d11_tex2d);
                SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_tex2d);
            }
            /* also need to create a separate MSAA render target texture? */
            if (img->sample_count > 1) {
                d3d11_tex_desc.BindFlags |= D3D11_BIND_RENDER_TARGET;
                d3d11_tex_desc.SampleDesc.Count = img->sample_count;
                d3d11_tex_desc.SampleDesc.Quality = D3D11_STANDARD_MULTISAMPLE_PATTERN;
                hr = ID3D11Device_CreateTexture2D(_sg_d3d11.dev, &d3d11_tex_desc, NULL, &img->d3d11_texmsaa);
                SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_texmsaa);
            }
            /* shader-resource-view */
            D3D11_SHADER_RESOURCE_VIEW_DESC d3d11_srv_desc;
            memset(&d3d11_srv_desc, 0, sizeof(d3d11_srv_desc));
            d3d11_srv_desc.Format = d3d11_tex_desc.Format;
            switch (img->type) {
                case SG_IMAGETYPE_2D:
                    d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
                    d3d11_srv_desc.Texture2D.MipLevels = img->num_mipmaps;
                    break;
                case SG_IMAGETYPE_CUBE:
                    d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURECUBE;
                    d3d11_srv_desc.TextureCube.MipLevels = img->num_mipmaps;
                    break;
                case SG_IMAGETYPE_ARRAY:
                    d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DARRAY;
                    d3d11_srv_desc.Texture2DArray.MipLevels = img->num_mipmaps;
                    d3d11_srv_desc.Texture2DArray.ArraySize = img->depth;
                    break;
                default:
                    SOKOL_UNREACHABLE; break;
            }
            hr = ID3D11Device_CreateShaderResourceView(_sg_d3d11.dev, (ID3D11Resource*)img->d3d11_tex2d, &d3d11_srv_desc, &img->d3d11_srv);
            SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_srv);
        }
        else {
            /* 3D texture */
            D3D11_TEXTURE3D_DESC d3d11_tex_desc;
            memset(&d3d11_tex_desc, 0, sizeof(d3d11_tex_desc));
            d3d11_tex_desc.Width = img->width;
            d3d11_tex_desc.Height = img->height;
            d3d11_tex_desc.Depth = img->depth;
            d3d11_tex_desc.MipLevels = img->num_mipmaps;
            if (img->render_target) {
                img->d3d11_format = _sg_d3d11_rendertarget_color_format(img->pixel_format);
                d3d11_tex_desc.Format = img->d3d11_format;
                d3d11_tex_desc.Usage = D3D11_USAGE_DEFAULT;
                d3d11_tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE|D3D11_BIND_RENDER_TARGET;
                d3d11_tex_desc.CPUAccessFlags = 0;
            }
            else {
                img->d3d11_format = _sg_d3d11_texture_format(img->pixel_format);
                d3d11_tex_desc.Format = img->d3d11_format;
                d3d11_tex_desc.Usage = _sg_d3d11_usage(img->usage);
                d3d11_tex_desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
                d3d11_tex_desc.CPUAccessFlags = _sg_d3d11_cpu_access_flags(img->usage);
            }
            if (img->d3d11_format == DXGI_FORMAT_UNKNOWN) {
                /* trying to create a texture format that's not supported by D3D */
                SOKOL_LOG("trying to create a D3D11 texture with unsupported pixel format\n");
                img->slot.state = SG_RESOURCESTATE_FAILED;
                return;
            }
            if (injected) {
                img->d3d11_tex3d = (ID3D11Texture3D*) desc->d3d11_texture;
                /* AddRef so destroy can unconditionally Release */
                ID3D11Texture3D_AddRef(img->d3d11_tex3d);
            }
            else {
                hr = ID3D11Device_CreateTexture3D(_sg_d3d11.dev, &d3d11_tex_desc, init_data, &img->d3d11_tex3d);
                SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_tex3d);
            }
            /* shader resource view for 3d texture */
            D3D11_SHADER_RESOURCE_VIEW_DESC d3d11_srv_desc;
            memset(&d3d11_srv_desc, 0, sizeof(d3d11_srv_desc));
            d3d11_srv_desc.Format = d3d11_tex_desc.Format;
            d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE3D;
            d3d11_srv_desc.Texture3D.MipLevels = img->num_mipmaps;
            hr = ID3D11Device_CreateShaderResourceView(_sg_d3d11.dev, (ID3D11Resource*)img->d3d11_tex3d, &d3d11_srv_desc, &img->d3d11_srv);
            SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_srv);
        }
        /* sampler state object, note D3D11 implements an internal shared-pool for sampler objects */
        D3D11_SAMPLER_DESC d3d11_smp_desc;
        memset(&d3d11_smp_desc, 0, sizeof(d3d11_smp_desc));
        d3d11_smp_desc.Filter = _sg_d3d11_filter(img->min_filter, img->mag_filter, img->max_anisotropy);
        d3d11_smp_desc.AddressU = _sg_d3d11_address_mode(img->wrap_u);
        d3d11_smp_desc.AddressV = _sg_d3d11_address_mode(img->wrap_v);
        d3d11_smp_desc.AddressW = _sg_d3d11_address_mode(img->wrap_w);
        d3d11_smp_desc.MaxAnisotropy = img->max_anisotropy;
        d3d11_smp_desc.ComparisonFunc = D3D11_COMPARISON_NEVER;
        d3d11_smp_desc.MinLOD = desc->min_lod;
        d3d11_smp_desc.MaxLOD = _sg_def_flt(desc->max_lod, D3D11_FLOAT32_MAX);
        hr = ID3D11Device_CreateSamplerState(_sg_d3d11.dev, &d3d11_smp_desc, &img->d3d11_smp);
        SOKOL_ASSERT(SUCCEEDED(hr) && img->d3d11_smp);
    }
    img->slot.state = SG_RESOURCESTATE_VALID;
}
  4267. _SOKOL_PRIVATE void _sg_destroy_image(_sg_image* img) {
  4268. SOKOL_ASSERT(img);
  4269. if (img->d3d11_tex2d) {
  4270. ID3D11Texture2D_Release(img->d3d11_tex2d);
  4271. }
  4272. if (img->d3d11_tex3d) {
  4273. ID3D11Texture3D_Release(img->d3d11_tex3d);
  4274. }
  4275. if (img->d3d11_texds) {
  4276. ID3D11Texture2D_Release(img->d3d11_texds);
  4277. }
  4278. if (img->d3d11_texmsaa) {
  4279. ID3D11Texture2D_Release(img->d3d11_texmsaa);
  4280. }
  4281. if (img->d3d11_srv) {
  4282. ID3D11ShaderResourceView_Release(img->d3d11_srv);
  4283. }
  4284. if (img->d3d11_smp) {
  4285. ID3D11SamplerState_Release(img->d3d11_smp);
  4286. }
  4287. _sg_init_image(img);
  4288. }
#if defined(SOKOL_D3D11_SHADER_COMPILER)
/* compile HLSL source for one shader stage via D3DCompile;
   target is "vs_5_0" or "ps_5_0"; entry point defaults to "main";
   returns the compiled byte-code blob, or NULL on compile error
   (error messages are logged); caller must Release the returned blob */
_SOKOL_PRIVATE ID3DBlob* _sg_d3d11_compile_shader(const sg_shader_stage_desc* stage_desc, const char* target) {
    ID3DBlob* output = NULL;
    ID3DBlob* errors = NULL;
    HRESULT hr = D3DCompile(
        stage_desc->source, /* pSrcData */
        strlen(stage_desc->source), /* SrcDataSize */
        NULL, /* pSourceName */
        NULL, /* pDefines */
        NULL, /* pInclude */
        stage_desc->entry ? stage_desc->entry : "main", /* pEntryPoint */
        target, /* pTarget (vs_5_0 or ps_5_0) */
        D3DCOMPILE_PACK_MATRIX_COLUMN_MAJOR | D3DCOMPILE_OPTIMIZATION_LEVEL3, /* Flags1 */
        0, /* Flags2 */
        &output, /* ppCode */
        &errors); /* ppErrorMsgs */
    if (errors) {
        /* log compile warnings/errors and release the message blob */
        SOKOL_LOG((LPCSTR)ID3D10Blob_GetBufferPointer(errors));
        ID3D10Blob_Release(errors); errors = NULL;
    }
    return output;
}
#endif
/* round 'val' up to the next multiple of 'round_to' (round_to must be a power of two) */
#define _sg_d3d11_roundup(val, round_to) (((val)+((round_to)-1))&~((round_to)-1))
/* create a shader resource: set up per-stage uniform-block and image-slot
   metadata, create one D3D11 constant buffer per uniform block, then create
   the vertex- and pixel-shader objects either from provided byte code or
   (when SOKOL_D3D11_SHADER_COMPILER is defined) by compiling HLSL source;
   a copy of the vertex-shader byte code is kept for input-layout creation */
_SOKOL_PRIVATE void _sg_create_shader(_sg_shader* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(!shd->d3d11_vs && !shd->d3d11_fs && !shd->d3d11_vs_blob);
    HRESULT hr;
    /* shader stage uniform blocks and image slots */
    for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
        const sg_shader_stage_desc* stage_desc = (stage_index == SG_SHADERSTAGE_VS) ? &desc->vs : &desc->fs;
        _sg_shader_stage* stage = &shd->stage[stage_index];
        SOKOL_ASSERT(stage->num_uniform_blocks == 0);
        for (int ub_index = 0; ub_index < SG_MAX_SHADERSTAGE_UBS; ub_index++) {
            const sg_shader_uniform_block_desc* ub_desc = &stage_desc->uniform_blocks[ub_index];
            /* a zero-sized entry terminates the uniform-block list */
            if (0 == ub_desc->size) {
                break;
            }
            _sg_uniform_block* ub = &stage->uniform_blocks[ub_index];
            ub->size = ub_desc->size;
            /* create a D3D constant buffer (size rounded up to a 16-byte multiple) */
            SOKOL_ASSERT(!stage->d3d11_cbs[ub_index]);
            D3D11_BUFFER_DESC cb_desc;
            memset(&cb_desc, 0, sizeof(cb_desc));
            cb_desc.ByteWidth = _sg_d3d11_roundup(ub->size, 16);
            cb_desc.Usage = D3D11_USAGE_DEFAULT;
            cb_desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
            hr = ID3D11Device_CreateBuffer(_sg_d3d11.dev, &cb_desc, NULL, &stage->d3d11_cbs[ub_index]);
            SOKOL_ASSERT(SUCCEEDED(hr) && stage->d3d11_cbs[ub_index]);
            stage->num_uniform_blocks++;
        }
        SOKOL_ASSERT(stage->num_images == 0);
        for (int img_index = 0; img_index < SG_MAX_SHADERSTAGE_IMAGES; img_index++) {
            const sg_shader_image_desc* img_desc = &stage_desc->images[img_index];
            /* a default-typed entry terminates the image-slot list */
            if (img_desc->type == _SG_IMAGETYPE_DEFAULT) {
                break;
            }
            stage->images[img_index].type = img_desc->type;
            stage->num_images++;
        }
    }
    /* obtain vertex- and pixel-shader byte code, either provided or compiled */
    const void* vs_ptr = 0, *fs_ptr = 0;
    SIZE_T vs_length = 0, fs_length = 0;
    #if defined(SOKOL_D3D11_SHADER_COMPILER)
    ID3DBlob* vs_blob = 0, *fs_blob = 0;
    #endif
    if (desc->vs.byte_code && desc->fs.byte_code) {
        /* create from byte code */
        vs_ptr = desc->vs.byte_code;
        fs_ptr = desc->fs.byte_code;
        vs_length = desc->vs.byte_code_size;
        fs_length = desc->fs.byte_code_size;
    }
    else {
        /* compile shader code */
        #if defined(SOKOL_D3D11_SHADER_COMPILER)
        vs_blob = _sg_d3d11_compile_shader(&desc->vs, "vs_5_0");
        fs_blob = _sg_d3d11_compile_shader(&desc->fs, "ps_5_0");
        if (vs_blob && fs_blob) {
            vs_ptr = ID3D10Blob_GetBufferPointer(vs_blob);
            vs_length = ID3D10Blob_GetBufferSize(vs_blob);
            fs_ptr = ID3D10Blob_GetBufferPointer(fs_blob);
            fs_length = ID3D10Blob_GetBufferSize(fs_blob);
        }
        #endif
    }
    if (vs_ptr && fs_ptr && (vs_length > 0) && (fs_length > 0)) {
        /* create the D3D vertex- and pixel-shader objects */
        hr = ID3D11Device_CreateVertexShader(_sg_d3d11.dev, vs_ptr, vs_length, NULL, &shd->d3d11_vs);
        SOKOL_ASSERT(SUCCEEDED(hr) && shd->d3d11_vs);
        hr = ID3D11Device_CreatePixelShader(_sg_d3d11.dev, fs_ptr, fs_length, NULL, &shd->d3d11_fs);
        SOKOL_ASSERT(SUCCEEDED(hr) && shd->d3d11_fs);
        /* need to store the vertex shader byte code, this is needed later in sg_create_pipeline */
        shd->d3d11_vs_blob_length = (int)vs_length;
        shd->d3d11_vs_blob = SOKOL_MALLOC((int)vs_length);
        SOKOL_ASSERT(shd->d3d11_vs_blob);
        memcpy(shd->d3d11_vs_blob, vs_ptr, vs_length);
        shd->slot.state = SG_RESOURCESTATE_VALID;
    }
    else {
        /* byte code missing, or compilation failed */
        shd->slot.state = SG_RESOURCESTATE_FAILED;
    }
    /* the compile-output blobs are no longer needed once copied/consumed */
    #if defined(SOKOL_D3D11_SHADER_COMPILER)
    if (vs_blob) {
        ID3D10Blob_Release(vs_blob); vs_blob = 0;
    }
    if (fs_blob) {
        ID3D10Blob_Release(fs_blob); fs_blob = 0;
    }
    #endif
}
  4401. _SOKOL_PRIVATE void _sg_destroy_shader(_sg_shader* shd) {
  4402. SOKOL_ASSERT(shd);
  4403. if (shd->d3d11_vs) {
  4404. ID3D11VertexShader_Release(shd->d3d11_vs);
  4405. }
  4406. if (shd->d3d11_fs) {
  4407. ID3D11PixelShader_Release(shd->d3d11_fs);
  4408. }
  4409. if (shd->d3d11_vs_blob) {
  4410. SOKOL_FREE(shd->d3d11_vs_blob);
  4411. }
  4412. for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
  4413. _sg_shader_stage* stage = &shd->stage[stage_index];
  4414. for (int ub_index = 0; ub_index < stage->num_uniform_blocks; ub_index++) {
  4415. if (stage->d3d11_cbs[ub_index]) {
  4416. ID3D11Buffer_Release(stage->d3d11_cbs[ub_index]);
  4417. }
  4418. }
  4419. }
  4420. _sg_init_shader(shd);
  4421. }
  4422. _SOKOL_PRIVATE void _sg_create_pipeline(_sg_pipeline* pip, _sg_shader* shd, const sg_pipeline_desc* desc) {
  4423. SOKOL_ASSERT(pip && shd && desc);
  4424. SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_ALLOC);
  4425. SOKOL_ASSERT(desc->shader.id == shd->slot.id);
  4426. SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_VALID);
  4427. SOKOL_ASSERT(shd->d3d11_vs_blob && shd->d3d11_vs_blob_length > 0);
  4428. SOKOL_ASSERT(!pip->d3d11_il && !pip->d3d11_rs && !pip->d3d11_dss && !pip->d3d11_bs);
  4429. HRESULT hr;
  4430. pip->shader = shd;
  4431. pip->shader_id = desc->shader;
  4432. pip->index_type = _sg_def(desc->index_type, SG_INDEXTYPE_NONE);
  4433. pip->color_attachment_count = _sg_def(desc->blend.color_attachment_count, 1);
  4434. pip->color_format = _sg_def(desc->blend.color_format, SG_PIXELFORMAT_RGBA8);
  4435. pip->depth_format = _sg_def(desc->blend.depth_format, SG_PIXELFORMAT_DEPTHSTENCIL);
  4436. pip->sample_count = _sg_def(desc->rasterizer.sample_count, 1);
  4437. pip->d3d11_index_format = _sg_d3d11_index_format(pip->index_type);
  4438. pip->d3d11_topology = _sg_d3d11_primitive_topology(_sg_def(desc->primitive_type, SG_PRIMITIVETYPE_TRIANGLES));
  4439. for (int i = 0; i < 4; i++) {
  4440. pip->blend_color[i] = desc->blend.blend_color[i];
  4441. }
  4442. pip->d3d11_stencil_ref = desc->depth_stencil.stencil_ref;
  4443. /* create input layout object */
  4444. int auto_offset[SG_MAX_SHADERSTAGE_BUFFERS];
  4445. for (int layout_index = 0; layout_index < SG_MAX_SHADERSTAGE_BUFFERS; layout_index++) {
  4446. auto_offset[layout_index] = 0;
  4447. }
  4448. bool use_auto_offset = true;
  4449. for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  4450. /* to use computed offsets, all attr offsets must be 0 */
  4451. if (desc->layout.attrs[attr_index].offset != 0) {
  4452. use_auto_offset = false;
  4453. }
  4454. }
  4455. D3D11_INPUT_ELEMENT_DESC d3d11_comps[SG_MAX_VERTEX_ATTRIBUTES];
  4456. memset(d3d11_comps, 0, sizeof(d3d11_comps));
  4457. int attr_index = 0;
  4458. for (; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  4459. const sg_vertex_attr_desc* a_desc = &desc->layout.attrs[attr_index];
  4460. if (a_desc->format == SG_VERTEXFORMAT_INVALID) {
  4461. break;
  4462. }
  4463. SOKOL_ASSERT((a_desc->buffer_index >= 0) && (a_desc->buffer_index < SG_MAX_SHADERSTAGE_BUFFERS));
  4464. const sg_buffer_layout_desc* l_desc = &desc->layout.buffers[a_desc->buffer_index];
  4465. const sg_vertex_step step_func = _sg_def(l_desc->step_func, SG_VERTEXSTEP_PER_VERTEX);
  4466. const int step_rate = _sg_def(l_desc->step_rate, 1);
  4467. D3D11_INPUT_ELEMENT_DESC* d3d11_comp = &d3d11_comps[attr_index];
  4468. d3d11_comp->SemanticName = a_desc->sem_name;
  4469. d3d11_comp->SemanticIndex = a_desc->sem_index;
  4470. d3d11_comp->Format = _sg_d3d11_vertex_format(a_desc->format);
  4471. d3d11_comp->InputSlot = a_desc->buffer_index;
  4472. d3d11_comp->AlignedByteOffset = use_auto_offset ? auto_offset[a_desc->buffer_index] : a_desc->offset;
  4473. d3d11_comp->InputSlotClass = _sg_d3d11_input_classification(step_func);
  4474. if (SG_VERTEXSTEP_PER_INSTANCE == step_func) {
  4475. d3d11_comp->InstanceDataStepRate = step_rate;
  4476. }
  4477. auto_offset[a_desc->buffer_index] += _sg_vertexformat_bytesize(a_desc->format);
  4478. pip->vertex_layout_valid[a_desc->buffer_index] = true;
  4479. }
  4480. for (int layout_index = 0; layout_index < SG_MAX_SHADERSTAGE_BUFFERS; layout_index++) {
  4481. if (pip->vertex_layout_valid[layout_index]) {
  4482. const sg_buffer_layout_desc* l_desc = &desc->layout.buffers[layout_index];
  4483. const int stride = l_desc->stride ? l_desc->stride : auto_offset[layout_index];
  4484. SOKOL_ASSERT(stride > 0);
  4485. pip->d3d11_vb_strides[layout_index] = stride;
  4486. }
  4487. else {
  4488. pip->d3d11_vb_strides[layout_index] = 0;
  4489. }
  4490. }
  4491. hr = ID3D11Device_CreateInputLayout(_sg_d3d11.dev,
  4492. d3d11_comps, /* pInputElementDesc */
  4493. attr_index, /* NumElements */
  4494. shd->d3d11_vs_blob, /* pShaderByteCodeWithInputSignature */
  4495. shd->d3d11_vs_blob_length, /* BytecodeLength */
  4496. &pip->d3d11_il);
  4497. SOKOL_ASSERT(SUCCEEDED(hr) && pip->d3d11_il);
  4498. /* create rasterizer state */
  4499. D3D11_RASTERIZER_DESC rs_desc;
  4500. memset(&rs_desc, 0, sizeof(rs_desc));
  4501. rs_desc.FillMode = D3D11_FILL_SOLID;
  4502. rs_desc.CullMode = _sg_d3d11_cull_mode(_sg_def(desc->rasterizer.cull_mode, SG_CULLMODE_NONE));
  4503. rs_desc.FrontCounterClockwise = _sg_def(desc->rasterizer.face_winding, SG_FACEWINDING_CW) == SG_FACEWINDING_CCW;
  4504. rs_desc.DepthBias = (INT) desc->rasterizer.depth_bias;
  4505. rs_desc.DepthBiasClamp = desc->rasterizer.depth_bias_clamp;
  4506. rs_desc.SlopeScaledDepthBias = desc->rasterizer.depth_bias_slope_scale;
  4507. rs_desc.DepthClipEnable = TRUE;
  4508. rs_desc.ScissorEnable = TRUE;
  4509. rs_desc.MultisampleEnable = _sg_def(desc->rasterizer.sample_count, 1) > 1;
  4510. rs_desc.AntialiasedLineEnable = FALSE;
  4511. hr = ID3D11Device_CreateRasterizerState(_sg_d3d11.dev, &rs_desc, &pip->d3d11_rs);
  4512. SOKOL_ASSERT(SUCCEEDED(hr) && pip->d3d11_rs);
  4513. /* create depth-stencil state */
  4514. D3D11_DEPTH_STENCIL_DESC dss_desc;
  4515. memset(&dss_desc, 0, sizeof(dss_desc));
  4516. dss_desc.DepthEnable = TRUE;
  4517. dss_desc.DepthWriteMask = desc->depth_stencil.depth_write_enabled ? D3D11_DEPTH_WRITE_MASK_ALL : D3D11_DEPTH_WRITE_MASK_ZERO;
  4518. dss_desc.DepthFunc = _sg_d3d11_compare_func(_sg_def(desc->depth_stencil.depth_compare_func, SG_COMPAREFUNC_ALWAYS));
  4519. dss_desc.StencilEnable = desc->depth_stencil.stencil_enabled;
  4520. dss_desc.StencilReadMask = desc->depth_stencil.stencil_read_mask;
  4521. dss_desc.StencilWriteMask = desc->depth_stencil.stencil_write_mask;
  4522. const sg_stencil_state* sf = &desc->depth_stencil.stencil_front;
  4523. dss_desc.FrontFace.StencilFailOp = _sg_d3d11_stencil_op(_sg_def(sf->fail_op, SG_STENCILOP_KEEP));
  4524. dss_desc.FrontFace.StencilDepthFailOp = _sg_d3d11_stencil_op(_sg_def(sf->depth_fail_op, SG_STENCILOP_KEEP));
  4525. dss_desc.FrontFace.StencilPassOp = _sg_d3d11_stencil_op(_sg_def(sf->pass_op, SG_STENCILOP_KEEP));
  4526. dss_desc.FrontFace.StencilFunc = _sg_d3d11_compare_func(_sg_def(sf->compare_func, SG_COMPAREFUNC_ALWAYS));
  4527. const sg_stencil_state* sb = &desc->depth_stencil.stencil_back;
  4528. dss_desc.BackFace.StencilFailOp = _sg_d3d11_stencil_op(_sg_def(sb->fail_op, SG_STENCILOP_KEEP));
  4529. dss_desc.BackFace.StencilDepthFailOp = _sg_d3d11_stencil_op(_sg_def(sb->depth_fail_op, SG_STENCILOP_KEEP));
  4530. dss_desc.BackFace.StencilPassOp = _sg_d3d11_stencil_op(_sg_def(sb->pass_op, SG_STENCILOP_KEEP));
  4531. dss_desc.BackFace.StencilFunc = _sg_d3d11_compare_func(_sg_def(sb->compare_func, SG_COMPAREFUNC_ALWAYS));
  4532. hr = ID3D11Device_CreateDepthStencilState(_sg_d3d11.dev, &dss_desc, &pip->d3d11_dss);
  4533. SOKOL_ASSERT(SUCCEEDED(hr) && pip->d3d11_dss);
  4534. /* create blend state */
  4535. D3D11_BLEND_DESC bs_desc;
  4536. memset(&bs_desc, 0, sizeof(bs_desc));
  4537. bs_desc.AlphaToCoverageEnable = desc->rasterizer.alpha_to_coverage_enabled;
  4538. bs_desc.IndependentBlendEnable = FALSE;
  4539. bs_desc.RenderTarget[0].BlendEnable = desc->blend.enabled;
  4540. bs_desc.RenderTarget[0].SrcBlend = _sg_d3d11_blend_factor(_sg_def(desc->blend.src_factor_rgb, SG_BLENDFACTOR_ONE));
  4541. bs_desc.RenderTarget[0].DestBlend = _sg_d3d11_blend_factor(_sg_def(desc->blend.dst_factor_rgb, SG_BLENDFACTOR_ZERO));
  4542. bs_desc.RenderTarget[0].BlendOp = _sg_d3d11_blend_op(_sg_def(desc->blend.op_rgb, SG_BLENDOP_ADD));
  4543. bs_desc.RenderTarget[0].SrcBlendAlpha = _sg_d3d11_blend_factor(_sg_def(desc->blend.src_factor_alpha, SG_BLENDFACTOR_ONE));
  4544. bs_desc.RenderTarget[0].DestBlendAlpha = _sg_d3d11_blend_factor(_sg_def(desc->blend.dst_factor_alpha, SG_BLENDFACTOR_ZERO));
  4545. bs_desc.RenderTarget[0].BlendOpAlpha = _sg_d3d11_blend_op(_sg_def(desc->blend.op_alpha, SG_BLENDOP_ADD));
  4546. bs_desc.RenderTarget[0].RenderTargetWriteMask = _sg_d3d11_color_write_mask(_sg_def((sg_color_mask)desc->blend.color_write_mask, SG_COLORMASK_RGBA));
  4547. hr = ID3D11Device_CreateBlendState(_sg_d3d11.dev, &bs_desc, &pip->d3d11_bs);
  4548. SOKOL_ASSERT(SUCCEEDED(hr) && pip->d3d11_bs);
  4549. pip->slot.state = SG_RESOURCESTATE_VALID;
  4550. }
  4551. _SOKOL_PRIVATE void _sg_destroy_pipeline(_sg_pipeline* pip) {
  4552. SOKOL_ASSERT(pip);
  4553. if (pip->d3d11_il) {
  4554. ID3D11InputLayout_Release(pip->d3d11_il);
  4555. }
  4556. if (pip->d3d11_rs) {
  4557. ID3D11RasterizerState_Release(pip->d3d11_rs);
  4558. }
  4559. if (pip->d3d11_dss) {
  4560. ID3D11DepthStencilState_Release(pip->d3d11_dss);
  4561. }
  4562. if (pip->d3d11_bs) {
  4563. ID3D11BlendState_Release(pip->d3d11_bs);
  4564. }
  4565. _sg_init_pipeline(pip);
  4566. }
/* create a render pass object: record the color- and depth-stencil-attachment
   images, create one D3D11 render-target-view per color attachment and an
   optional depth-stencil-view (att_images is the array of resolved image
   pointers, with the depth-stencil image at index SG_MAX_COLOR_ATTACHMENTS) */
_SOKOL_PRIVATE void _sg_create_pass(_sg_pass* pass, _sg_image** att_images, const sg_pass_desc* desc) {
    SOKOL_ASSERT(pass && desc);
    SOKOL_ASSERT(pass->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(att_images && att_images[0]);
    SOKOL_ASSERT(_sg_d3d11.dev);
    const sg_attachment_desc* att_desc;
    _sg_attachment* att;
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        SOKOL_ASSERT(0 == pass->color_atts[i].image);
        SOKOL_ASSERT(pass->d3d11_rtvs[i] == 0);
        att_desc = &desc->color_attachments[i];
        /* an invalid image id marks an unused attachment slot */
        if (att_desc->image.id != SG_INVALID_ID) {
            pass->num_color_atts++;
            SOKOL_ASSERT(att_images[i] && (att_images[i]->slot.id == att_desc->image.id));
            SOKOL_ASSERT(_sg_is_valid_rendertarget_color_format(att_images[i]->pixel_format));
            att = &pass->color_atts[i];
            SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
            att->image = att_images[i];
            att->image_id = att_desc->image;
            att->mip_level = att_desc->mip_level;
            att->slice = att_desc->slice;
            /* create D3D11 render-target-view */
            ID3D11Resource* d3d11_res = 0;
            /* MSAA attachments render into the image's separate MSAA texture
               (resolved into the regular texture when the pass ends) */
            const bool is_msaa = att->image->sample_count > 1;
            D3D11_RENDER_TARGET_VIEW_DESC d3d11_rtv_desc;
            memset(&d3d11_rtv_desc, 0, sizeof(d3d11_rtv_desc));
            d3d11_rtv_desc.Format = att->image->d3d11_format;
            /* the RTV view-dimension depends on image type and MSAA state */
            switch (att->image->type) {
                case SG_IMAGETYPE_2D:
                    if (is_msaa) {
                        d3d11_res = (ID3D11Resource*) att->image->d3d11_texmsaa;
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMS;
                    }
                    else {
                        d3d11_res = (ID3D11Resource*) att->image->d3d11_tex2d;
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
                        d3d11_rtv_desc.Texture2D.MipSlice = att->mip_level;
                    }
                    break;
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    /* cube and array images render into a single face/layer
                       selected by the attachment's slice index */
                    if (is_msaa) {
                        d3d11_res = (ID3D11Resource*) att->image->d3d11_texmsaa;
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMSARRAY;
                        d3d11_rtv_desc.Texture2DMSArray.FirstArraySlice = att->slice;
                        d3d11_rtv_desc.Texture2DMSArray.ArraySize = 1;
                    }
                    else {
                        d3d11_res = (ID3D11Resource*) att->image->d3d11_tex2d;
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DARRAY;
                        d3d11_rtv_desc.Texture2DArray.MipSlice = att->mip_level;
                        d3d11_rtv_desc.Texture2DArray.FirstArraySlice = att->slice;
                        d3d11_rtv_desc.Texture2DArray.ArraySize = 1;
                    }
                    break;
                case SG_IMAGETYPE_3D:
                    /* 3D render targets select a single depth slice; MSAA
                       is not supported for 3D textures */
                    SOKOL_ASSERT(!is_msaa);
                    d3d11_res = (ID3D11Resource*) att->image->d3d11_tex3d;
                    d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE3D;
                    d3d11_rtv_desc.Texture3D.MipSlice = att->mip_level;
                    d3d11_rtv_desc.Texture3D.FirstWSlice = att->slice;
                    d3d11_rtv_desc.Texture3D.WSize = 1;
                    break;
                default:
                    SOKOL_UNREACHABLE; break;
            }
            SOKOL_ASSERT(d3d11_res);
            HRESULT hr = ID3D11Device_CreateRenderTargetView(_sg_d3d11.dev, d3d11_res, &d3d11_rtv_desc, &pass->d3d11_rtvs[i]);
            SOKOL_ASSERT(SUCCEEDED(hr) && pass->d3d11_rtvs[i]);
        }
    }
    /* optional depth-stencil image */
    SOKOL_ASSERT(0 == pass->ds_att.image);
    SOKOL_ASSERT(pass->d3d11_dsv == 0);
    att_desc = &desc->depth_stencil_attachment;
    const int ds_img_index = SG_MAX_COLOR_ATTACHMENTS;
    if (att_desc->image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(att_images[ds_img_index] && (att_images[ds_img_index]->slot.id == att_desc->image.id));
        SOKOL_ASSERT(_sg_is_valid_rendertarget_depth_format(att_images[ds_img_index]->pixel_format));
        att = &pass->ds_att;
        SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
        att->image = att_images[ds_img_index];
        att->image_id = att_desc->image;
        att->mip_level = att_desc->mip_level;
        att->slice = att_desc->slice;
        /* create D3D11 depth-stencil-view */
        D3D11_DEPTH_STENCIL_VIEW_DESC d3d11_dsv_desc;
        memset(&d3d11_dsv_desc, 0, sizeof(d3d11_dsv_desc));
        d3d11_dsv_desc.Format = att->image->d3d11_format;
        const bool is_msaa = att->image->sample_count > 1;
        if (is_msaa) {
            d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS;
        }
        else {
            d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
        }
        /* depth-stencil attachments always use the dedicated depth texture */
        ID3D11Resource* d3d11_res = (ID3D11Resource*) att->image->d3d11_texds;
        SOKOL_ASSERT(d3d11_res);
        HRESULT hr = ID3D11Device_CreateDepthStencilView(_sg_d3d11.dev, d3d11_res, &d3d11_dsv_desc, &pass->d3d11_dsv);
        SOKOL_ASSERT(SUCCEEDED(hr) && pass->d3d11_dsv);
    }
    pass->slot.state = SG_RESOURCESTATE_VALID;
}
  4670. _SOKOL_PRIVATE void _sg_destroy_pass(_sg_pass* pass) {
  4671. SOKOL_ASSERT(pass);
  4672. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  4673. if (pass->d3d11_rtvs[i]) {
  4674. ID3D11RenderTargetView_Release(pass->d3d11_rtvs[i]);
  4675. }
  4676. }
  4677. if (pass->d3d11_dsv) {
  4678. ID3D11DepthStencilView_Release(pass->d3d11_dsv);
  4679. }
  4680. _sg_init_pass(pass);
  4681. }
/* begin a rendering pass: bind either the pass attachments (pass != 0) or the
   default framebuffer (pass == 0), set a full-size viewport and scissor rect,
   and execute the requested clear actions (w/h: render target size in pixels) */
_SOKOL_PRIVATE void _sg_begin_pass(_sg_pass* pass, const sg_pass_action* action, int w, int h) {
    SOKOL_ASSERT(action);
    SOKOL_ASSERT(!_sg_d3d11.in_pass);
    _sg_d3d11.in_pass = true;
    _sg_d3d11.cur_width = w;
    _sg_d3d11.cur_height = h;
    if (pass) {
        /* offscreen pass: gather the attachment views created in _sg_create_pass */
        _sg_d3d11.cur_pass = pass;
        _sg_d3d11.cur_pass_id.id = pass->slot.id;
        _sg_d3d11.num_rtvs = 0;
        for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
            _sg_d3d11.cur_rtvs[i] = pass->d3d11_rtvs[i];
            if (_sg_d3d11.cur_rtvs[i]) {
                _sg_d3d11.num_rtvs++;
            }
        }
        _sg_d3d11.cur_dsv = pass->d3d11_dsv;
    }
    else {
        /* render to default frame buffer */
        _sg_d3d11.cur_pass = 0;
        _sg_d3d11.cur_pass_id.id = SG_INVALID_ID;
        _sg_d3d11.num_rtvs = 1;
        /* default RTV/DSV are provided by the application through callbacks */
        _sg_d3d11.cur_rtvs[0] = (ID3D11RenderTargetView*) _sg_d3d11.rtv_cb();
        for (int i = 1; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
            _sg_d3d11.cur_rtvs[i] = 0;
        }
        _sg_d3d11.cur_dsv = (ID3D11DepthStencilView*) _sg_d3d11.dsv_cb();
        SOKOL_ASSERT(_sg_d3d11.cur_rtvs[0] && _sg_d3d11.cur_dsv);
    }
    /* apply the render-target- and depth-stencil-views */
    ID3D11DeviceContext_OMSetRenderTargets(_sg_d3d11.ctx, SG_MAX_COLOR_ATTACHMENTS, _sg_d3d11.cur_rtvs, _sg_d3d11.cur_dsv);
    /* set viewport and scissor rect to cover whole screen */
    D3D11_VIEWPORT vp;
    memset(&vp, 0, sizeof(vp));     /* TopLeftX/Y and MinDepth default to 0 */
    vp.Width = (FLOAT) w;
    vp.Height = (FLOAT) h;
    vp.MaxDepth = 1.0f;
    ID3D11DeviceContext_RSSetViewports(_sg_d3d11.ctx, 1, &vp);
    D3D11_RECT rect;
    rect.left = 0;
    rect.top = 0;
    rect.right = w;
    rect.bottom = h;
    ID3D11DeviceContext_RSSetScissorRects(_sg_d3d11.ctx, 1, &rect);
    /* perform clear action */
    for (int i = 0; i < _sg_d3d11.num_rtvs; i++) {
        if (action->colors[i].action == SG_ACTION_CLEAR) {
            ID3D11DeviceContext_ClearRenderTargetView(_sg_d3d11.ctx, _sg_d3d11.cur_rtvs[i], action->colors[i].val);
        }
    }
    /* depth and stencil clears are combined into a single D3D11 call */
    UINT ds_flags = 0;
    if (action->depth.action == SG_ACTION_CLEAR) {
        ds_flags |= D3D11_CLEAR_DEPTH;
    }
    if (action->stencil.action == SG_ACTION_CLEAR) {
        ds_flags |= D3D11_CLEAR_STENCIL;
    }
    if ((0 != ds_flags) && _sg_d3d11.cur_dsv) {
        ID3D11DeviceContext_ClearDepthStencilView(_sg_d3d11.ctx, _sg_d3d11.cur_dsv, ds_flags, action->depth.val, action->stencil.val);
    }
}
  4744. /* D3D11CalcSubresource only exists for C++ */
  4745. _SOKOL_PRIVATE UINT _sg_d3d11_calcsubresource(UINT mip_slice, UINT array_slice, UINT mip_levels) {
  4746. return mip_slice + array_slice * mip_levels;
  4747. }
/* finish the current pass: resolve MSAA color attachments into their
   non-MSAA textures, then reset all per-pass state */
_SOKOL_PRIVATE void _sg_end_pass() {
    SOKOL_ASSERT(_sg_d3d11.in_pass && _sg_d3d11.ctx);
    _sg_d3d11.in_pass = false;
    /* need to resolve MSAA render target into texture? */
    if (_sg_d3d11.cur_pass) {
        SOKOL_ASSERT(_sg_d3d11.cur_pass->slot.id == _sg_d3d11.cur_pass_id.id);
        for (int i = 0; i < _sg_d3d11.num_rtvs; i++) {
            _sg_attachment* att = &_sg_d3d11.cur_pass->color_atts[i];
            SOKOL_ASSERT(att->image && (att->image->slot.id == att->image_id.id));
            if (att->image->sample_count > 1) {
                SOKOL_ASSERT(att->image->d3d11_tex2d && att->image->d3d11_texmsaa && !att->image->d3d11_tex3d);
                SOKOL_ASSERT(DXGI_FORMAT_UNKNOWN != att->image->d3d11_format);
                const _sg_image* img = att->image;
                UINT subres = _sg_d3d11_calcsubresource(att->mip_level, att->slice, img->num_mipmaps);
                /* NOTE(review): the same subresource index is passed for both
                   destination and MSAA source; if the MSAA texture only has a
                   single subresource, the source index should arguably be 0
                   whenever mip_level/slice > 0 -- confirm against the MSAA
                   texture creation code */
                ID3D11DeviceContext_ResolveSubresource(_sg_d3d11.ctx,
                    (ID3D11Resource*) img->d3d11_tex2d,     /* pDstResource */
                    subres,                                 /* DstSubresource */
                    (ID3D11Resource*) img->d3d11_texmsaa,   /* pSrcResource */
                    subres,                                 /* SrcSubresource */
                    img->d3d11_format);
            }
        }
    }
    /* clear cached pass/pipeline bindings and the device-context state */
    _sg_d3d11.cur_pass = 0;
    _sg_d3d11.cur_pass_id.id = SG_INVALID_ID;
    _sg_d3d11.cur_pipeline = 0;
    _sg_d3d11.cur_pipeline_id.id = SG_INVALID_ID;
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        _sg_d3d11.cur_rtvs[i] = 0;
    }
    _sg_d3d11.cur_dsv = 0;
    _sg_d3d11_clear_state();
}
  4781. _SOKOL_PRIVATE void _sg_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
  4782. SOKOL_ASSERT(_sg_d3d11.ctx);
  4783. SOKOL_ASSERT(_sg_d3d11.in_pass);
  4784. D3D11_VIEWPORT vp;
  4785. vp.TopLeftX = (FLOAT) x;
  4786. vp.TopLeftY = (FLOAT) (origin_top_left ? y : (_sg_d3d11.cur_height - (y + h)));
  4787. vp.Width = (FLOAT) w;
  4788. vp.Height = (FLOAT) h;
  4789. vp.MinDepth = 0.0f;
  4790. vp.MaxDepth = 1.0f;
  4791. ID3D11DeviceContext_RSSetViewports(_sg_d3d11.ctx, 1, &vp);
  4792. }
  4793. _SOKOL_PRIVATE void _sg_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
  4794. SOKOL_ASSERT(_sg_d3d11.ctx);
  4795. SOKOL_ASSERT(_sg_d3d11.in_pass);
  4796. D3D11_RECT rect;
  4797. rect.left = x;
  4798. rect.top = (origin_top_left ? y : (_sg_d3d11.cur_height - (y + h)));
  4799. rect.right = x + w;
  4800. rect.bottom = origin_top_left ? (y + h) : (_sg_d3d11.cur_height - y);
  4801. ID3D11DeviceContext_RSSetScissorRects(_sg_d3d11.ctx, 1, &rect);
  4802. }
/* bind everything needed for subsequent draw calls: pipeline state objects,
   vertex/index buffers, and per-stage images (as shader-resource-views plus
   sampler states); unused slots are explicitly bound to null */
_SOKOL_PRIVATE void _sg_apply_draw_state(
    _sg_pipeline* pip,
    _sg_buffer** vbs, int num_vbs, _sg_buffer* ib,
    _sg_image** vs_imgs, int num_vs_imgs,
    _sg_image** fs_imgs, int num_fs_imgs)
{
    SOKOL_ASSERT(pip);
    SOKOL_ASSERT(pip->shader);
    SOKOL_ASSERT(_sg_d3d11.ctx);
    SOKOL_ASSERT(_sg_d3d11.in_pass);
    SOKOL_ASSERT(pip->d3d11_rs && pip->d3d11_bs && pip->d3d11_dss && pip->d3d11_il);
    _sg_d3d11.cur_pipeline = pip;
    _sg_d3d11.cur_pipeline_id.id = pip->slot.id;
    /* a valid index format on the pipeline selects indexed draw calls later */
    _sg_d3d11.use_indexed_draw = (pip->d3d11_index_format != DXGI_FORMAT_UNKNOWN);
    /* gather all the D3D11 resources into arrays */
    ID3D11Buffer* d3d11_ib = ib ? ib->d3d11_buf : 0;
    ID3D11Buffer* d3d11_vbs[SG_MAX_SHADERSTAGE_BUFFERS];
    UINT d3d11_vb_offsets[SG_MAX_SHADERSTAGE_BUFFERS];
    ID3D11ShaderResourceView* d3d11_vs_srvs[SG_MAX_SHADERSTAGE_IMAGES];
    ID3D11SamplerState* d3d11_vs_smps[SG_MAX_SHADERSTAGE_IMAGES];
    ID3D11ShaderResourceView* d3d11_fs_srvs[SG_MAX_SHADERSTAGE_IMAGES];
    ID3D11SamplerState* d3d11_fs_smps[SG_MAX_SHADERSTAGE_IMAGES];
    int i;
    /* fill the used slots, then null-pad the remainder of each array */
    for (i = 0; i < num_vbs; i++) {
        SOKOL_ASSERT(vbs[i]->d3d11_buf);
        d3d11_vbs[i] = vbs[i]->d3d11_buf;
        d3d11_vb_offsets[i] = 0;
    }
    for (; i < SG_MAX_SHADERSTAGE_BUFFERS; i++) {
        d3d11_vbs[i] = 0;
        d3d11_vb_offsets[i] = 0;
    }
    for (i = 0; i < num_vs_imgs; i++) {
        SOKOL_ASSERT(vs_imgs[i]->d3d11_srv);
        SOKOL_ASSERT(vs_imgs[i]->d3d11_smp);
        d3d11_vs_srvs[i] = vs_imgs[i]->d3d11_srv;
        d3d11_vs_smps[i] = vs_imgs[i]->d3d11_smp;
    }
    for (; i < SG_MAX_SHADERSTAGE_IMAGES; i++) {
        d3d11_vs_srvs[i] = 0;
        d3d11_vs_smps[i] = 0;
    }
    for (i = 0; i < num_fs_imgs; i++) {
        SOKOL_ASSERT(fs_imgs[i]->d3d11_srv);
        SOKOL_ASSERT(fs_imgs[i]->d3d11_smp);
        d3d11_fs_srvs[i] = fs_imgs[i]->d3d11_srv;
        d3d11_fs_smps[i] = fs_imgs[i]->d3d11_smp;
    }
    for (; i < SG_MAX_SHADERSTAGE_IMAGES; i++) {
        d3d11_fs_srvs[i] = 0;
        d3d11_fs_smps[i] = 0;
    }
    /* FIXME: is it worth it to implement a state cache here? measure! */
    ID3D11DeviceContext_RSSetState(_sg_d3d11.ctx, pip->d3d11_rs);
    ID3D11DeviceContext_OMSetDepthStencilState(_sg_d3d11.ctx, pip->d3d11_dss, pip->d3d11_stencil_ref);
    ID3D11DeviceContext_OMSetBlendState(_sg_d3d11.ctx, pip->d3d11_bs, pip->blend_color, 0xFFFFFFFF);
    ID3D11DeviceContext_IASetVertexBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_BUFFERS, d3d11_vbs, pip->d3d11_vb_strides, d3d11_vb_offsets);
    ID3D11DeviceContext_IASetPrimitiveTopology(_sg_d3d11.ctx, pip->d3d11_topology);
    ID3D11DeviceContext_IASetIndexBuffer(_sg_d3d11.ctx, d3d11_ib, pip->d3d11_index_format, 0);
    ID3D11DeviceContext_IASetInputLayout(_sg_d3d11.ctx, pip->d3d11_il);
    ID3D11DeviceContext_VSSetShader(_sg_d3d11.ctx, pip->shader->d3d11_vs, NULL, 0);
    ID3D11DeviceContext_VSSetConstantBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_UBS, pip->shader->stage[SG_SHADERSTAGE_VS].d3d11_cbs);
    ID3D11DeviceContext_VSSetShaderResources(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, d3d11_vs_srvs);
    ID3D11DeviceContext_VSSetSamplers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, d3d11_vs_smps);
    ID3D11DeviceContext_PSSetShader(_sg_d3d11.ctx, pip->shader->d3d11_fs, NULL, 0);
    ID3D11DeviceContext_PSSetConstantBuffers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_UBS, pip->shader->stage[SG_SHADERSTAGE_FS].d3d11_cbs);
    ID3D11DeviceContext_PSSetShaderResources(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, d3d11_fs_srvs);
    ID3D11DeviceContext_PSSetSamplers(_sg_d3d11.ctx, 0, SG_MAX_SHADERSTAGE_IMAGES, d3d11_fs_smps);
}
  4872. _SOKOL_PRIVATE void _sg_apply_uniform_block(sg_shader_stage stage_index, int ub_index, const void* data, int num_bytes) {
  4873. SOKOL_ASSERT(_sg_d3d11.ctx && _sg_d3d11.in_pass);
  4874. SOKOL_ASSERT(data && (num_bytes > 0));
  4875. SOKOL_ASSERT((stage_index >= 0) && (stage_index < SG_NUM_SHADER_STAGES));
  4876. SOKOL_ASSERT((ub_index >= 0) && (ub_index < SG_MAX_SHADERSTAGE_UBS));
  4877. SOKOL_ASSERT(_sg_d3d11.cur_pipeline && _sg_d3d11.cur_pipeline->slot.id == _sg_d3d11.cur_pipeline_id.id);
  4878. SOKOL_ASSERT(_sg_d3d11.cur_pipeline->shader && _sg_d3d11.cur_pipeline->shader->slot.id == _sg_d3d11.cur_pipeline->shader_id.id);
  4879. SOKOL_ASSERT(ub_index < _sg_d3d11.cur_pipeline->shader->stage[stage_index].num_uniform_blocks);
  4880. SOKOL_ASSERT(num_bytes == _sg_d3d11.cur_pipeline->shader->stage[stage_index].uniform_blocks[ub_index].size);
  4881. ID3D11Buffer* cb = _sg_d3d11.cur_pipeline->shader->stage[stage_index].d3d11_cbs[ub_index];
  4882. SOKOL_ASSERT(cb);
  4883. ID3D11DeviceContext_UpdateSubresource(_sg_d3d11.ctx, (ID3D11Resource*)cb, 0, NULL, data, 0, 0);
  4884. }
  4885. _SOKOL_PRIVATE void _sg_draw(int base_element, int num_elements, int num_instances) {
  4886. SOKOL_ASSERT(_sg_d3d11.in_pass);
  4887. if (_sg_d3d11.use_indexed_draw) {
  4888. if (1 == num_instances) {
  4889. ID3D11DeviceContext_DrawIndexed(_sg_d3d11.ctx, num_elements, base_element, 0);
  4890. }
  4891. else {
  4892. ID3D11DeviceContext_DrawIndexedInstanced(_sg_d3d11.ctx, num_elements, num_instances, base_element, 0, 0);
  4893. }
  4894. }
  4895. else {
  4896. if (1 == num_instances) {
  4897. ID3D11DeviceContext_Draw(_sg_d3d11.ctx, num_elements, base_element);
  4898. }
  4899. else {
  4900. ID3D11DeviceContext_DrawInstanced(_sg_d3d11.ctx, num_elements, num_instances, base_element, 0);
  4901. }
  4902. }
  4903. }
/* end-of-frame hook; the D3D11 backend has no deferred work to flush and
   only checks that no pass is still in flight */
_SOKOL_PRIVATE void _sg_commit() {
    SOKOL_ASSERT(!_sg_d3d11.in_pass);
}
  4907. _SOKOL_PRIVATE void _sg_update_buffer(_sg_buffer* buf, const void* data_ptr, int data_size) {
  4908. SOKOL_ASSERT(buf && data_ptr && data_size);
  4909. SOKOL_ASSERT(_sg_d3d11.ctx);
  4910. SOKOL_ASSERT(buf->d3d11_buf);
  4911. D3D11_MAPPED_SUBRESOURCE d3d11_msr;
  4912. HRESULT hr = ID3D11DeviceContext_Map(_sg_d3d11.ctx, (ID3D11Resource*)buf->d3d11_buf, 0, D3D11_MAP_WRITE_DISCARD, 0, &d3d11_msr);
  4913. SOKOL_ASSERT(SUCCEEDED(hr));
  4914. memcpy(d3d11_msr.pData, data_ptr, data_size);
  4915. ID3D11DeviceContext_Unmap(_sg_d3d11.ctx, (ID3D11Resource*)buf->d3d11_buf, 0);
  4916. }
/* copy new pixel data into all subresources of a texture via map-with-discard;
   iterates faces (cubemaps), array slices and mip levels in D3D11's
   subresource order */
_SOKOL_PRIVATE void _sg_update_image(_sg_image* img, const sg_image_content* data) {
    SOKOL_ASSERT(img && data);
    SOKOL_ASSERT(_sg_d3d11.ctx);
    SOKOL_ASSERT(img->d3d11_tex2d || img->d3d11_tex3d);
    ID3D11Resource* d3d11_res = 0;
    if (img->d3d11_tex3d) {
        d3d11_res = (ID3D11Resource*) img->d3d11_tex3d;
    }
    else {
        d3d11_res = (ID3D11Resource*) img->d3d11_tex2d;
    }
    SOKOL_ASSERT(d3d11_res);
    const int num_faces = (img->type == SG_IMAGETYPE_CUBE) ? 6:1;
    const int num_slices = (img->type == SG_IMAGETYPE_ARRAY) ? img->depth:1;
    int subres_index = 0;
    HRESULT hr;
    D3D11_MAPPED_SUBRESOURCE d3d11_msr;
    for (int face_index = 0; face_index < num_faces; face_index++) {
        for (int slice_index = 0; slice_index < num_slices; slice_index++) {
            for (int mip_index = 0; mip_index < img->num_mipmaps; mip_index++, subres_index++) {
                SOKOL_ASSERT(subres_index < (SG_MAX_MIPMAPS * SG_MAX_TEXTUREARRAY_LAYERS));
                /* clamp mip dimensions to at least 1 pixel */
                const int mip_width = ((img->width>>mip_index)>0) ? img->width>>mip_index : 1;
                const int mip_height = ((img->height>>mip_index)>0) ? img->height>>mip_index : 1;
                /* a subimage is addressed by [face][mip]; array slices share
                   one subimage buffer and are carved out by equal-size chunks */
                const sg_subimage_content* subimg_content = &(data->subimage[face_index][mip_index]);
                const int slice_size = subimg_content->size / num_slices;
                const int slice_offset = slice_size * slice_index;
                const uint8_t* slice_ptr = ((const uint8_t*)subimg_content->ptr) + slice_offset;
                hr = ID3D11DeviceContext_Map(_sg_d3d11.ctx, d3d11_res, subres_index, D3D11_MAP_WRITE_DISCARD, 0, &d3d11_msr);
                SOKOL_ASSERT(SUCCEEDED(hr));
                /* NOTE(review): mip_width/mip_height are computed but unused --
                   the copy assumes the mapped row pitch matches the tightly
                   packed source data; confirm for non-power-of-two widths */
                memcpy(d3d11_msr.pData, slice_ptr, slice_size);
                ID3D11DeviceContext_Unmap(_sg_d3d11.ctx, d3d11_res, subres_index);
            }
        }
    }
}
/* invalidate any cached render state after external D3D11 calls; the D3D11
   backend keeps no cache of its own, so clearing the device-context state
   is sufficient */
_SOKOL_PRIVATE void _sg_reset_state_cache() {
    /* just clear the d3d11 device context state */
    _sg_d3d11_clear_state();
}
  4956. #ifdef __cplusplus
  4957. } // extern "C"
  4958. #endif
  4959. /*== METAL BACKEND ===========================================================*/
  4960. #elif defined(SOKOL_METAL_MACOS) || defined(SOKOL_METAL_IOS)
  4961. #if !__has_feature(objc_arc)
  4962. #error "Please enable ARC when using the Metal backend"
  4963. #endif
  4964. /* memset() */
  4965. #include <string.h>
  4966. #import <Metal/Metal.h>
  4967. #ifdef __cplusplus
  4968. extern "C" {
  4969. #endif
/* internal Metal backend configuration constants */
enum {
    /* size of the per-frame uniform buffer (4 MB) */
    _SG_MTL_DEFAULT_UB_SIZE = 4 * 1024 * 1024,
    #if defined(SOKOL_METAL_MACOS)
    /* uniform-buffer offset alignment (platform-dependent: 256 on macOS) */
    _SG_MTL_UB_ALIGN = 256,
    #else
    /* uniform-buffer offset alignment (platform-dependent: 16 on iOS) */
    _SG_MTL_UB_ALIGN = 16,
    #endif
    /* initial capacity of the sampler-state cache */
    _SG_MTL_DEFAULT_SAMPLER_CACHE_CAPACITY = 64,
    /* sentinel value marking an unused/invalid pool slot */
    _SG_MTL_INVALID_POOL_INDEX = 0xFFFFFFFF
};
  4980. /*-- enum translation functions ----------------------------------------------*/
  4981. _SOKOL_PRIVATE MTLLoadAction _sg_mtl_load_action(sg_action a) {
  4982. switch (a) {
  4983. case SG_ACTION_CLEAR: return MTLLoadActionClear;
  4984. case SG_ACTION_LOAD: return MTLLoadActionLoad;
  4985. case SG_ACTION_DONTCARE: return MTLLoadActionDontCare;
  4986. default: SOKOL_UNREACHABLE; return (MTLLoadAction)0;
  4987. }
  4988. }
  4989. _SOKOL_PRIVATE MTLResourceOptions _sg_mtl_buffer_resource_options(sg_usage usg) {
  4990. switch (usg) {
  4991. case SG_USAGE_IMMUTABLE:
  4992. return MTLResourceStorageModeShared;
  4993. case SG_USAGE_DYNAMIC:
  4994. case SG_USAGE_STREAM:
  4995. #if defined(SOKOL_METAL_MACOS)
  4996. return MTLCPUCacheModeWriteCombined|MTLResourceStorageModeManaged;
  4997. #else
  4998. return MTLCPUCacheModeWriteCombined;
  4999. #endif
  5000. default:
  5001. SOKOL_UNREACHABLE;
  5002. return 0;
  5003. }
  5004. }
  5005. _SOKOL_PRIVATE MTLVertexStepFunction _sg_mtl_step_function(sg_vertex_step step) {
  5006. switch (step) {
  5007. case SG_VERTEXSTEP_PER_VERTEX: return MTLVertexStepFunctionPerVertex;
  5008. case SG_VERTEXSTEP_PER_INSTANCE: return MTLVertexStepFunctionPerInstance;
  5009. default: SOKOL_UNREACHABLE; return (MTLVertexStepFunction)0;
  5010. }
  5011. }
/* translate a sokol vertex format into the corresponding MTLVertexFormat */
_SOKOL_PRIVATE MTLVertexFormat _sg_mtl_vertex_format(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return MTLVertexFormatFloat;
        case SG_VERTEXFORMAT_FLOAT2: return MTLVertexFormatFloat2;
        case SG_VERTEXFORMAT_FLOAT3: return MTLVertexFormatFloat3;
        case SG_VERTEXFORMAT_FLOAT4: return MTLVertexFormatFloat4;
        case SG_VERTEXFORMAT_BYTE4: return MTLVertexFormatChar4;
        case SG_VERTEXFORMAT_BYTE4N: return MTLVertexFormatChar4Normalized;
        case SG_VERTEXFORMAT_UBYTE4: return MTLVertexFormatUChar4;
        case SG_VERTEXFORMAT_UBYTE4N: return MTLVertexFormatUChar4Normalized;
        case SG_VERTEXFORMAT_SHORT2: return MTLVertexFormatShort2;
        case SG_VERTEXFORMAT_SHORT2N: return MTLVertexFormatShort2Normalized;
        case SG_VERTEXFORMAT_SHORT4: return MTLVertexFormatShort4;
        case SG_VERTEXFORMAT_SHORT4N: return MTLVertexFormatShort4Normalized;
        case SG_VERTEXFORMAT_UINT10_N2: return MTLVertexFormatUInt1010102Normalized;
        default: SOKOL_UNREACHABLE; return (MTLVertexFormat)0;
    }
}
  5030. _SOKOL_PRIVATE MTLPrimitiveType _sg_mtl_primitive_type(sg_primitive_type t) {
  5031. switch (t) {
  5032. case SG_PRIMITIVETYPE_POINTS: return MTLPrimitiveTypePoint;
  5033. case SG_PRIMITIVETYPE_LINES: return MTLPrimitiveTypeLine;
  5034. case SG_PRIMITIVETYPE_LINE_STRIP: return MTLPrimitiveTypeLineStrip;
  5035. case SG_PRIMITIVETYPE_TRIANGLES: return MTLPrimitiveTypeTriangle;
  5036. case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return MTLPrimitiveTypeTriangleStrip;
  5037. default: SOKOL_UNREACHABLE; return (MTLPrimitiveType)0;
  5038. }
  5039. }
/* translate a sokol pixel format into the corresponding MTLPixelFormat for
   regular (sampled) textures; returns MTLPixelFormatInvalid for formats not
   supported on the current platform */
_SOKOL_PRIVATE MTLPixelFormat _sg_mtl_texture_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_RGBA8: return MTLPixelFormatRGBA8Unorm;
        case SG_PIXELFORMAT_R10G10B10A2: return MTLPixelFormatRGB10A2Unorm;
        case SG_PIXELFORMAT_RGBA32F: return MTLPixelFormatRGBA32Float;
        case SG_PIXELFORMAT_RGBA16F: return MTLPixelFormatRGBA16Float;
        case SG_PIXELFORMAT_R32F: return MTLPixelFormatR32Float;
        case SG_PIXELFORMAT_R16F: return MTLPixelFormatR16Float;
        case SG_PIXELFORMAT_L8: return MTLPixelFormatR8Unorm;
        /* compressed formats are platform-specific: BC/DXT on macOS,
           PVRTC/ETC2 on iOS */
        #if defined(SOKOL_METAL_MACOS)
        case SG_PIXELFORMAT_DXT1: return MTLPixelFormatBC1_RGBA;
        case SG_PIXELFORMAT_DXT3: return MTLPixelFormatBC2_RGBA;
        case SG_PIXELFORMAT_DXT5: return MTLPixelFormatBC3_RGBA;
        #else
        case SG_PIXELFORMAT_PVRTC2_RGB: return MTLPixelFormatPVRTC_RGB_2BPP;
        case SG_PIXELFORMAT_PVRTC4_RGB: return MTLPixelFormatPVRTC_RGB_4BPP;
        case SG_PIXELFORMAT_PVRTC2_RGBA: return MTLPixelFormatPVRTC_RGBA_2BPP;
        case SG_PIXELFORMAT_PVRTC4_RGBA: return MTLPixelFormatPVRTC_RGBA_4BPP;
        case SG_PIXELFORMAT_ETC2_RGB8: return MTLPixelFormatETC2_RGB8;
        case SG_PIXELFORMAT_ETC2_SRGB8: return MTLPixelFormatETC2_RGB8_sRGB;
        #endif
        default: return MTLPixelFormatInvalid;
    }
}
  5064. _SOKOL_PRIVATE MTLPixelFormat _sg_mtl_rendertarget_color_format(sg_pixel_format fmt) {
  5065. switch (fmt) {
  5066. case SG_PIXELFORMAT_RGBA8: return MTLPixelFormatBGRA8Unorm; /* not a bug */
  5067. case SG_PIXELFORMAT_RGBA32F: return MTLPixelFormatRGBA32Float;
  5068. case SG_PIXELFORMAT_RGBA16F: return MTLPixelFormatRGBA16Float;
  5069. case SG_PIXELFORMAT_R10G10B10A2: return MTLPixelFormatRGB10A2Unorm;
  5070. default: return MTLPixelFormatInvalid;
  5071. }
  5072. }
  5073. _SOKOL_PRIVATE MTLPixelFormat _sg_mtl_rendertarget_depth_format(sg_pixel_format fmt) {
  5074. switch (fmt) {
  5075. case SG_PIXELFORMAT_DEPTH:
  5076. return MTLPixelFormatDepth32Float;
  5077. case SG_PIXELFORMAT_DEPTHSTENCIL:
  5078. /* NOTE: Depth24_Stencil8 isn't universally supported! */
  5079. return MTLPixelFormatDepth32Float_Stencil8;
  5080. default:
  5081. return MTLPixelFormatInvalid;
  5082. }
  5083. }
  5084. _SOKOL_PRIVATE MTLPixelFormat _sg_mtl_rendertarget_stencil_format(sg_pixel_format fmt) {
  5085. switch (fmt) {
  5086. case SG_PIXELFORMAT_DEPTHSTENCIL:
  5087. return MTLPixelFormatDepth32Float_Stencil8;
  5088. default:
  5089. return MTLPixelFormatInvalid;
  5090. }
  5091. }
  5092. _SOKOL_PRIVATE MTLColorWriteMask _sg_mtl_color_write_mask(sg_color_mask m) {
  5093. MTLColorWriteMask mtl_mask = MTLColorWriteMaskNone;
  5094. if (m & SG_COLORMASK_R) {
  5095. mtl_mask |= MTLColorWriteMaskRed;
  5096. }
  5097. if (m & SG_COLORMASK_G) {
  5098. mtl_mask |= MTLColorWriteMaskGreen;
  5099. }
  5100. if (m & SG_COLORMASK_B) {
  5101. mtl_mask |= MTLColorWriteMaskBlue;
  5102. }
  5103. if (m & SG_COLORMASK_A) {
  5104. mtl_mask |= MTLColorWriteMaskAlpha;
  5105. }
  5106. return mtl_mask;
  5107. }
  5108. _SOKOL_PRIVATE MTLBlendOperation _sg_mtl_blend_op(sg_blend_op op) {
  5109. switch (op) {
  5110. case SG_BLENDOP_ADD: return MTLBlendOperationAdd;
  5111. case SG_BLENDOP_SUBTRACT: return MTLBlendOperationSubtract;
  5112. case SG_BLENDOP_REVERSE_SUBTRACT: return MTLBlendOperationReverseSubtract;
  5113. default: SOKOL_UNREACHABLE; return (MTLBlendOperation)0;
  5114. }
  5115. }
/* translate a sokol blend factor into the corresponding MTLBlendFactor */
_SOKOL_PRIVATE MTLBlendFactor _sg_mtl_blend_factor(sg_blend_factor f) {
    switch (f) {
        case SG_BLENDFACTOR_ZERO: return MTLBlendFactorZero;
        case SG_BLENDFACTOR_ONE: return MTLBlendFactorOne;
        case SG_BLENDFACTOR_SRC_COLOR: return MTLBlendFactorSourceColor;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return MTLBlendFactorOneMinusSourceColor;
        case SG_BLENDFACTOR_SRC_ALPHA: return MTLBlendFactorSourceAlpha;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return MTLBlendFactorOneMinusSourceAlpha;
        case SG_BLENDFACTOR_DST_COLOR: return MTLBlendFactorDestinationColor;
        case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return MTLBlendFactorOneMinusDestinationColor;
        case SG_BLENDFACTOR_DST_ALPHA: return MTLBlendFactorDestinationAlpha;
        case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return MTLBlendFactorOneMinusDestinationAlpha;
        case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return MTLBlendFactorSourceAlphaSaturated;
        case SG_BLENDFACTOR_BLEND_COLOR: return MTLBlendFactorBlendColor;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return MTLBlendFactorOneMinusBlendColor;
        case SG_BLENDFACTOR_BLEND_ALPHA: return MTLBlendFactorBlendAlpha;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return MTLBlendFactorOneMinusBlendAlpha;
        default: SOKOL_UNREACHABLE; return (MTLBlendFactor)0;
    }
}
  5136. _SOKOL_PRIVATE MTLCompareFunction _sg_mtl_compare_func(sg_compare_func f) {
  5137. switch (f) {
  5138. case SG_COMPAREFUNC_NEVER: return MTLCompareFunctionNever;
  5139. case SG_COMPAREFUNC_LESS: return MTLCompareFunctionLess;
  5140. case SG_COMPAREFUNC_EQUAL: return MTLCompareFunctionEqual;
  5141. case SG_COMPAREFUNC_LESS_EQUAL: return MTLCompareFunctionLessEqual;
  5142. case SG_COMPAREFUNC_GREATER: return MTLCompareFunctionGreater;
  5143. case SG_COMPAREFUNC_NOT_EQUAL: return MTLCompareFunctionNotEqual;
  5144. case SG_COMPAREFUNC_GREATER_EQUAL: return MTLCompareFunctionGreaterEqual;
  5145. case SG_COMPAREFUNC_ALWAYS: return MTLCompareFunctionAlways;
  5146. default: SOKOL_UNREACHABLE; return (MTLCompareFunction)0;
  5147. }
  5148. }
  5149. _SOKOL_PRIVATE MTLStencilOperation _sg_mtl_stencil_op(sg_stencil_op op) {
  5150. switch (op) {
  5151. case SG_STENCILOP_KEEP: return MTLStencilOperationKeep;
  5152. case SG_STENCILOP_ZERO: return MTLStencilOperationZero;
  5153. case SG_STENCILOP_REPLACE: return MTLStencilOperationReplace;
  5154. case SG_STENCILOP_INCR_CLAMP: return MTLStencilOperationIncrementClamp;
  5155. case SG_STENCILOP_DECR_CLAMP: return MTLStencilOperationDecrementClamp;
  5156. case SG_STENCILOP_INVERT: return MTLStencilOperationInvert;
  5157. case SG_STENCILOP_INCR_WRAP: return MTLStencilOperationIncrementWrap;
  5158. case SG_STENCILOP_DECR_WRAP: return MTLStencilOperationDecrementWrap;
  5159. default: SOKOL_UNREACHABLE; return (MTLStencilOperation)0;
  5160. }
  5161. }
  5162. _SOKOL_PRIVATE MTLCullMode _sg_mtl_cull_mode(sg_cull_mode m) {
  5163. switch (m) {
  5164. case SG_CULLMODE_NONE: return MTLCullModeNone;
  5165. case SG_CULLMODE_FRONT: return MTLCullModeFront;
  5166. case SG_CULLMODE_BACK: return MTLCullModeBack;
  5167. default: SOKOL_UNREACHABLE; return (MTLCullMode)0;
  5168. }
  5169. }
  5170. _SOKOL_PRIVATE MTLWinding _sg_mtl_winding(sg_face_winding w) {
  5171. switch (w) {
  5172. case SG_FACEWINDING_CW: return MTLWindingClockwise;
  5173. case SG_FACEWINDING_CCW: return MTLWindingCounterClockwise;
  5174. default: SOKOL_UNREACHABLE; return (MTLWinding)0;
  5175. }
  5176. }
  5177. _SOKOL_PRIVATE MTLIndexType _sg_mtl_index_type(sg_index_type t) {
  5178. switch (t) {
  5179. case SG_INDEXTYPE_UINT16: return MTLIndexTypeUInt16;
  5180. case SG_INDEXTYPE_UINT32: return MTLIndexTypeUInt32;
  5181. default: SOKOL_UNREACHABLE; return (MTLIndexType)0;
  5182. }
  5183. }
  5184. _SOKOL_PRIVATE NSUInteger _sg_mtl_index_size(sg_index_type t) {
  5185. switch (t) {
  5186. case SG_INDEXTYPE_NONE: return 0;
  5187. case SG_INDEXTYPE_UINT16: return 2;
  5188. case SG_INDEXTYPE_UINT32: return 4;
  5189. default: SOKOL_UNREACHABLE; return 0;
  5190. }
  5191. }
  5192. _SOKOL_PRIVATE MTLTextureType _sg_mtl_texture_type(sg_image_type t) {
  5193. switch (t) {
  5194. case SG_IMAGETYPE_2D: return MTLTextureType2D;
  5195. case SG_IMAGETYPE_CUBE: return MTLTextureTypeCube;
  5196. case SG_IMAGETYPE_3D: return MTLTextureType3D;
  5197. case SG_IMAGETYPE_ARRAY: return MTLTextureType2DArray;
  5198. default: SOKOL_UNREACHABLE; return (MTLTextureType)0;
  5199. }
  5200. }
  5201. _SOKOL_PRIVATE bool _sg_mtl_is_pvrtc(sg_pixel_format fmt) {
  5202. switch (fmt) {
  5203. case SG_PIXELFORMAT_PVRTC2_RGB:
  5204. case SG_PIXELFORMAT_PVRTC2_RGBA:
  5205. case SG_PIXELFORMAT_PVRTC4_RGB:
  5206. case SG_PIXELFORMAT_PVRTC4_RGBA:
  5207. return true;
  5208. default:
  5209. return false;
  5210. }
  5211. }
  5212. _SOKOL_PRIVATE MTLSamplerAddressMode _sg_mtl_address_mode(sg_wrap w) {
  5213. switch (w) {
  5214. case SG_WRAP_REPEAT: return MTLSamplerAddressModeRepeat;
  5215. case SG_WRAP_CLAMP_TO_EDGE: return MTLSamplerAddressModeClampToEdge;
  5216. case SG_WRAP_MIRRORED_REPEAT: return MTLSamplerAddressModeMirrorRepeat;
  5217. default: SOKOL_UNREACHABLE; return (MTLSamplerAddressMode)0;
  5218. }
  5219. }
  5220. _SOKOL_PRIVATE MTLSamplerMinMagFilter _sg_mtl_minmag_filter(sg_filter f) {
  5221. switch (f) {
  5222. case SG_FILTER_NEAREST:
  5223. case SG_FILTER_NEAREST_MIPMAP_NEAREST:
  5224. case SG_FILTER_NEAREST_MIPMAP_LINEAR:
  5225. return MTLSamplerMinMagFilterNearest;
  5226. case SG_FILTER_LINEAR:
  5227. case SG_FILTER_LINEAR_MIPMAP_NEAREST:
  5228. case SG_FILTER_LINEAR_MIPMAP_LINEAR:
  5229. return MTLSamplerMinMagFilterLinear;
  5230. default:
  5231. SOKOL_UNREACHABLE; return (MTLSamplerMinMagFilter)0;
  5232. }
  5233. }
  5234. _SOKOL_PRIVATE MTLSamplerMipFilter _sg_mtl_mip_filter(sg_filter f) {
  5235. switch (f) {
  5236. case SG_FILTER_NEAREST:
  5237. case SG_FILTER_LINEAR:
  5238. return MTLSamplerMipFilterNotMipmapped;
  5239. case SG_FILTER_NEAREST_MIPMAP_NEAREST:
  5240. case SG_FILTER_LINEAR_MIPMAP_NEAREST:
  5241. return MTLSamplerMipFilterNearest;
  5242. case SG_FILTER_NEAREST_MIPMAP_LINEAR:
  5243. case SG_FILTER_LINEAR_MIPMAP_LINEAR:
  5244. return MTLSamplerMipFilterLinear;
  5245. default:
  5246. SOKOL_UNREACHABLE; return (MTLSamplerMipFilter)0;
  5247. }
  5248. }
/*-- a pool for all Metal resource objects, with deferred release queue -------*/
/* number of slots in the pool (computed in _sg_mtl_init_pool) */
static uint32_t _sg_mtl_pool_size;
/* holds strong references to all live Metal objects; free slots contain
   the NSNull singleton as placeholder */
static NSMutableArray* _sg_mtl_pool;
/* stack of currently-free pool indices; free_queue_top is both the
   stack pointer and the number of free slots */
static uint32_t _sg_mtl_free_queue_top;
static uint32_t* _sg_mtl_free_queue;
/* circular deferred-release queue: 'front' is where new items are written,
   'back' is where the garbage collector reads */
static uint32_t _sg_mtl_release_queue_front;
static uint32_t _sg_mtl_release_queue_back;
typedef struct {
    uint32_t frame_index; /* frame index at which it is safe to release this resource */
    uint32_t pool_index;
} _sg_mtl_release_item;
static _sg_mtl_release_item* _sg_mtl_release_queue;
  5261. _SOKOL_PRIVATE void _sg_mtl_init_pool(const sg_desc* desc) {
  5262. _sg_mtl_pool_size = 2 *
  5263. 2 * _sg_def(desc->buffer_pool_size, _SG_DEFAULT_BUFFER_POOL_SIZE) +
  5264. 5 * _sg_def(desc->image_pool_size, _SG_DEFAULT_IMAGE_POOL_SIZE) +
  5265. 4 * _sg_def(desc->shader_pool_size, _SG_DEFAULT_SHADER_POOL_SIZE) +
  5266. 2 * _sg_def(desc->pipeline_pool_size, _SG_DEFAULT_PIPELINE_POOL_SIZE) +
  5267. _sg_def(desc->pass_pool_size, _SG_DEFAULT_PASS_POOL_SIZE);
  5268. _sg_mtl_pool = [NSMutableArray arrayWithCapacity:_sg_mtl_pool_size];
  5269. NSNull* null = [NSNull null];
  5270. for (uint32_t i = 0; i < _sg_mtl_pool_size; i++) {
  5271. [_sg_mtl_pool addObject:null];
  5272. }
  5273. SOKOL_ASSERT([_sg_mtl_pool count] == _sg_mtl_pool_size);
  5274. /* a queue of currently free slot indices */
  5275. _sg_mtl_free_queue_top = 0;
  5276. _sg_mtl_free_queue = (uint32_t*)SOKOL_MALLOC(_sg_mtl_pool_size * sizeof(uint32_t));
  5277. for (int i = _sg_mtl_pool_size-1; i >= 0; i--) {
  5278. _sg_mtl_free_queue[_sg_mtl_free_queue_top++] = (uint32_t)i;
  5279. }
  5280. /* a circular queue which holds release items (frame index
  5281. when a resource is to be released, and the resource's
  5282. pool index
  5283. */
  5284. _sg_mtl_release_queue_front = 0;
  5285. _sg_mtl_release_queue_back = 0;
  5286. _sg_mtl_release_queue = (_sg_mtl_release_item*)SOKOL_MALLOC(_sg_mtl_pool_size * sizeof(_sg_mtl_release_item));
  5287. for (uint32_t i = 0; i < _sg_mtl_pool_size; i++) {
  5288. _sg_mtl_release_queue[i].frame_index = 0;
  5289. _sg_mtl_release_queue[i].pool_index = _SG_MTL_INVALID_POOL_INDEX;
  5290. }
  5291. }
  5292. _SOKOL_PRIVATE void _sg_mtl_destroy_pool() {
  5293. SOKOL_FREE(_sg_mtl_release_queue); _sg_mtl_release_queue = 0;
  5294. SOKOL_FREE(_sg_mtl_free_queue); _sg_mtl_free_queue = 0;
  5295. _sg_mtl_pool = nil;
  5296. }
  5297. /* get a new free resource pool slot */
  5298. _SOKOL_PRIVATE uint32_t _sg_mtl_alloc_pool_slot() {
  5299. SOKOL_ASSERT(_sg_mtl_free_queue_top > 0);
  5300. const uint32_t pool_index = _sg_mtl_free_queue[--_sg_mtl_free_queue_top];
  5301. return pool_index;
  5302. }
  5303. /* put a free resource pool slot back into the free-queue */
  5304. _SOKOL_PRIVATE void _sg_mtl_free_pool_slot(uint32_t pool_index) {
  5305. SOKOL_ASSERT(_sg_mtl_free_queue_top < _sg_mtl_pool_size);
  5306. _sg_mtl_free_queue[_sg_mtl_free_queue_top++] = pool_index;
  5307. }
  5308. /* add an MTLResource to the pool, return pool index or 0xFFFFFFFF if input was 'nil' */
  5309. _SOKOL_PRIVATE uint32_t _sg_mtl_add_resource(id res) {
  5310. if (nil == res) {
  5311. return _SG_MTL_INVALID_POOL_INDEX;
  5312. }
  5313. const uint32_t pool_index = _sg_mtl_alloc_pool_slot();
  5314. SOKOL_ASSERT([NSNull null] == _sg_mtl_pool[pool_index]);
  5315. _sg_mtl_pool[pool_index] = res;
  5316. return pool_index;
  5317. }
  5318. /* mark an MTLResource for release, this will put the resource into the
  5319. deferred-release queue, and the resource will then be released N frames later,
  5320. the special pool index 0xFFFFFFFF will be ignored (this means that a nil
  5321. value was provided to _sg_mtl_add_resource()
  5322. */
  5323. _SOKOL_PRIVATE void _sg_mtl_release_resource(uint32_t frame_index, uint32_t pool_index) {
  5324. if (pool_index == _SG_MTL_INVALID_POOL_INDEX) {
  5325. return;
  5326. }
  5327. SOKOL_ASSERT((pool_index >= 0) && (pool_index < _sg_mtl_pool_size));
  5328. SOKOL_ASSERT([NSNull null] != _sg_mtl_pool[pool_index]);
  5329. int slot_index = _sg_mtl_release_queue_front++;
  5330. if (_sg_mtl_release_queue_front >= _sg_mtl_pool_size) {
  5331. /* wrap-around */
  5332. _sg_mtl_release_queue_front = 0;
  5333. }
  5334. /* release queue full? */
  5335. SOKOL_ASSERT(_sg_mtl_release_queue_front != _sg_mtl_release_queue_back);
  5336. SOKOL_ASSERT(0 == _sg_mtl_release_queue[slot_index].frame_index);
  5337. const uint32_t safe_to_release_frame_index = frame_index + SG_NUM_INFLIGHT_FRAMES + 1;
  5338. _sg_mtl_release_queue[slot_index].frame_index = safe_to_release_frame_index;
  5339. _sg_mtl_release_queue[slot_index].pool_index = pool_index;
  5340. }
/* run garbage-collection pass on all resources in the release-queue;
   frame_index: the current frame counter; every queue entry whose
   recorded safe-release frame is <= frame_index is released (its pool
   slot is reset to NSNull and returned to the free queue) */
_SOKOL_PRIVATE void _sg_mtl_garbage_collect(uint32_t frame_index) {
    /* entries were enqueued in frame order, so scanning from the back of
       the circular queue can stop at the first too-young entry */
    while (_sg_mtl_release_queue_back != _sg_mtl_release_queue_front) {
        if (frame_index < _sg_mtl_release_queue[_sg_mtl_release_queue_back].frame_index) {
            /* don't need to check further, release-items past this are too young */
            break;
        }
        /* safe to release this resource */
        const uint32_t pool_index = _sg_mtl_release_queue[_sg_mtl_release_queue_back].pool_index;
        SOKOL_ASSERT(pool_index < _sg_mtl_pool_size);
        SOKOL_ASSERT(_sg_mtl_pool[pool_index] != [NSNull null]);
        /* overwriting with NSNull drops the pool's strong reference,
           which releases the Metal object (under ARC) */
        _sg_mtl_pool[pool_index] = [NSNull null];
        /* put the now free pool index back on the free queue */
        _sg_mtl_free_pool_slot(pool_index);
        /* reset the release queue slot and advance the back index */
        _sg_mtl_release_queue[_sg_mtl_release_queue_back].frame_index = 0;
        _sg_mtl_release_queue[_sg_mtl_release_queue_back].pool_index = _SG_MTL_INVALID_POOL_INDEX;
        _sg_mtl_release_queue_back++;
        if (_sg_mtl_release_queue_back >= _sg_mtl_pool_size) {
            /* wrap-around */
            _sg_mtl_release_queue_back = 0;
        }
    }
}
/*-- a very simple sampler cache -----------------------------------------------
    since there's only a small number of different samplers, sampler objects
    will never be deleted (except on shutdown), and searching an identical
    sampler is a simple linear search
*/
/* one cache entry: the sampler-relevant subset of sg_image_desc plus the
   pool index of the associated MTLSamplerState object */
typedef struct {
    sg_filter min_filter;
    sg_filter mag_filter;
    sg_wrap wrap_u;
    sg_wrap wrap_v;
    sg_wrap wrap_w;
    uint32_t max_anisotropy;
    int min_lod;    /* orig min/max_lod is float, this is int(min/max_lod*1000.0) */
    int max_lod;
    uint32_t mtl_sampler_state; /* index into _sg_mtl_pool */
} _sg_mtl_sampler_cache_item;
static int _sg_mtl_sampler_cache_capacity;  /* max number of entries */
static int _sg_mtl_sampler_cache_size;      /* current number of entries */
static _sg_mtl_sampler_cache_item* _sg_mtl_sampler_cache;
  5384. /* initialize the sampler cache */
  5385. _SOKOL_PRIVATE void _sg_mtl_init_sampler_cache(const sg_desc* desc) {
  5386. _sg_mtl_sampler_cache_capacity = _sg_def(desc->mtl_sampler_cache_size, _SG_MTL_DEFAULT_SAMPLER_CACHE_CAPACITY);
  5387. _sg_mtl_sampler_cache_size = 0;
  5388. const int size = _sg_mtl_sampler_cache_capacity * sizeof(_sg_mtl_sampler_cache_item);
  5389. _sg_mtl_sampler_cache = (_sg_mtl_sampler_cache_item*)SOKOL_MALLOC(size);
  5390. memset(_sg_mtl_sampler_cache, 0, size);
  5391. }
  5392. /* destroy the sampler cache, and release all sampler objects */
  5393. _SOKOL_PRIVATE void _sg_mtl_destroy_sampler_cache(uint32_t frame_index) {
  5394. SOKOL_ASSERT(_sg_mtl_sampler_cache);
  5395. SOKOL_ASSERT(_sg_mtl_sampler_cache_size <= _sg_mtl_sampler_cache_capacity);
  5396. for (int i = 0; i < _sg_mtl_sampler_cache_size; i++) {
  5397. _sg_mtl_release_resource(frame_index, _sg_mtl_sampler_cache[i].mtl_sampler_state);
  5398. }
  5399. SOKOL_FREE(_sg_mtl_sampler_cache); _sg_mtl_sampler_cache = 0;
  5400. _sg_mtl_sampler_cache_size = 0;
  5401. _sg_mtl_sampler_cache_capacity = 0;
  5402. }
/*
    create and add an MTLSamplerStateObject and return its resource pool index,
    reuse identical sampler state if one exists
*/
_SOKOL_PRIVATE uint32_t _sg_mtl_create_sampler(id<MTLDevice> mtl_device, const sg_image_desc* img_desc) {
    SOKOL_ASSERT(img_desc);
    SOKOL_ASSERT(_sg_mtl_sampler_cache);
    /* resolve defaults for all state that participates in the cache lookup */
    const sg_filter min_filter = _sg_def(img_desc->min_filter, SG_FILTER_NEAREST);
    const sg_filter mag_filter = _sg_def(img_desc->mag_filter, SG_FILTER_NEAREST);
    const sg_wrap wrap_u = _sg_def(img_desc->wrap_u, SG_WRAP_REPEAT);
    const sg_wrap wrap_v = _sg_def(img_desc->wrap_v, SG_WRAP_REPEAT);
    const sg_wrap wrap_w = _sg_def(img_desc->wrap_w, SG_WRAP_REPEAT);
    const uint32_t max_anisotropy = _sg_def(img_desc->max_anisotropy, 1);
    /* convert floats to valid int for proper comparison */
    const int min_lod = (int)(img_desc->min_lod * 1000.0f);
    const int max_lod = (int)(_sg_def_flt(img_desc->max_lod, 1000.0f) * 1000.0f);
    /* first try to find identical sampler, number of samplers will be small, so linear search is ok */
    for (int i = 0; i < _sg_mtl_sampler_cache_size; i++) {
        _sg_mtl_sampler_cache_item* item = &_sg_mtl_sampler_cache[i];
        if ((min_filter == item->min_filter) &&
            (mag_filter == item->mag_filter) &&
            (wrap_u == item->wrap_u) &&
            (wrap_v == item->wrap_v) &&
            (wrap_w == item->wrap_w) &&
            (max_anisotropy == item->max_anisotropy) &&
            (min_lod == item->min_lod) &&
            (max_lod == item->max_lod))
        {
            return item->mtl_sampler_state;
        }
    }
    /* fallthrough: need to create a new MTLSamplerState object;
       asserts if the sampler cache is full */
    SOKOL_ASSERT(_sg_mtl_sampler_cache_size < _sg_mtl_sampler_cache_capacity);
    _sg_mtl_sampler_cache_item* new_item = &_sg_mtl_sampler_cache[_sg_mtl_sampler_cache_size++];
    new_item->min_filter = min_filter;
    new_item->mag_filter = mag_filter;
    new_item->wrap_u = wrap_u;
    new_item->wrap_v = wrap_v;
    new_item->wrap_w = wrap_w;
    new_item->min_lod = min_lod;
    new_item->max_lod = max_lod;
    new_item->max_anisotropy = max_anisotropy;
    MTLSamplerDescriptor* mtl_desc = [[MTLSamplerDescriptor alloc] init];
    mtl_desc.sAddressMode = _sg_mtl_address_mode(wrap_u);
    mtl_desc.tAddressMode = _sg_mtl_address_mode(wrap_v);
    if (SG_IMAGETYPE_3D == img_desc->type) {
        /* the r (depth) address mode is only relevant for 3D textures */
        mtl_desc.rAddressMode = _sg_mtl_address_mode(wrap_w);
    }
    mtl_desc.minFilter = _sg_mtl_minmag_filter(min_filter);
    mtl_desc.magFilter = _sg_mtl_minmag_filter(mag_filter);
    /* NOTE: the mipmap filter is derived from min_filter only */
    mtl_desc.mipFilter = _sg_mtl_mip_filter(min_filter);
    mtl_desc.lodMinClamp = img_desc->min_lod;
    mtl_desc.lodMaxClamp = _sg_def_flt(img_desc->max_lod, FLT_MAX);
    mtl_desc.maxAnisotropy = max_anisotropy;
    mtl_desc.normalizedCoordinates = YES;
    id<MTLSamplerState> mtl_sampler = [mtl_device newSamplerStateWithDescriptor:mtl_desc];
    new_item->mtl_sampler_state = _sg_mtl_add_resource(mtl_sampler);
    return new_item->mtl_sampler_state;
}
/*-- Metal backend resource structs ------------------------------------------*/
typedef struct {
    _sg_slot slot;
    int size;                   /* buffer size in bytes */
    sg_buffer_type type;
    sg_usage usage;
    uint32_t upd_frame_index;   /* frame index of the last content update */
    int num_slots;              /* 1 for immutable, SG_NUM_INFLIGHT_FRAMES otherwise */
    int active_slot;            /* slot currently used for rendering */
    uint32_t mtl_buf[SG_NUM_INFLIGHT_FRAMES]; /* index into _sg_mtl_pool */
} _sg_buffer;
/* reset a buffer struct to its zero/initial state */
_SOKOL_PRIVATE void _sg_init_buffer(_sg_buffer* buf) {
    SOKOL_ASSERT(buf);
    memset(buf, 0, sizeof(_sg_buffer));
}
/* Metal backend image: up to SG_NUM_INFLIGHT_FRAMES color textures plus
   optional depth- and MSAA-resolve textures; all mtl_* members are
   indices into _sg_mtl_pool */
typedef struct {
    _sg_slot slot;
    sg_image_type type;
    bool render_target;
    int width;
    int height;
    int depth;          /* depth for 3D images, number of slices for array images */
    int num_mipmaps;
    sg_usage usage;
    sg_pixel_format pixel_format;
    int sample_count;   /* >1 means MSAA */
    sg_filter min_filter;
    sg_filter mag_filter;
    sg_wrap wrap_u;
    sg_wrap wrap_v;
    sg_wrap wrap_w;
    uint32_t max_anisotropy;
    uint32_t upd_frame_index;   /* frame index of the last content update */
    int num_slots;              /* 1 for immutable, SG_NUM_INFLIGHT_FRAMES otherwise */
    int active_slot;
    uint32_t mtl_tex[SG_NUM_INFLIGHT_FRAMES];
    uint32_t mtl_depth_tex;
    uint32_t mtl_msaa_tex;
    uint32_t mtl_sampler_state;
} _sg_image;
/* reset an image struct to its zero/initial state */
_SOKOL_PRIVATE void _sg_init_image(_sg_image* img) {
    SOKOL_ASSERT(img);
    memset(img, 0, sizeof(_sg_image));
}
/* size of one uniform block in bytes */
typedef struct {
    int size;
} _sg_uniform_block;
/* expected image type of one shader-stage texture binding */
typedef struct {
    sg_image_type type;
} _sg_shader_image;
/* per-stage shader reflection data plus the MTLLibrary/MTLFunction
   pool indices for that stage */
typedef struct {
    int num_uniform_blocks;
    int num_images;
    _sg_uniform_block uniform_blocks[SG_MAX_SHADERSTAGE_UBS];
    _sg_shader_image images[SG_MAX_SHADERSTAGE_IMAGES];
    uint32_t mtl_lib;   /* index into _sg_mtl_pool */
    uint32_t mtl_func;  /* index into _sg_mtl_pool */
} _sg_shader_stage;
typedef struct {
    _sg_slot slot;
    _sg_shader_stage stage[SG_NUM_SHADER_STAGES];
} _sg_shader;
/* reset a shader struct to its zero/initial state */
_SOKOL_PRIVATE void _sg_init_shader(_sg_shader* shd) {
    SOKOL_ASSERT(shd);
    memset(shd, 0, sizeof(_sg_shader));
}
/* Metal backend pipeline: pre-translated Metal state so the draw path
   doesn't need to convert sokol enums every frame; mtl_rps/mtl_dss are
   pool indices of the MTLRenderPipelineState / MTLDepthStencilState */
typedef struct {
    _sg_slot slot;
    _sg_shader* shader;
    sg_shader shader_id;
    bool vertex_layout_valid[SG_MAX_SHADERSTAGE_BUFFERS];
    int color_attachment_count;
    sg_pixel_format color_format;
    sg_pixel_format depth_format;
    int sample_count;
    float depth_bias;
    float depth_bias_slope_scale;
    float depth_bias_clamp;
    MTLPrimitiveType mtl_prim_type;
    sg_index_type index_type;
    NSUInteger mtl_index_size;  /* byte size of one index (0/2/4) */
    MTLIndexType mtl_index_type;
    MTLCullMode mtl_cull_mode;
    MTLWinding mtl_winding;
    float blend_color[4];
    uint32_t mtl_stencil_ref;
    uint32_t mtl_rps;   /* index into _sg_mtl_pool */
    uint32_t mtl_dss;   /* index into _sg_mtl_pool */
} _sg_pipeline;
/* reset a pipeline struct to its zero/initial state */
_SOKOL_PRIVATE void _sg_init_pipeline(_sg_pipeline* pip) {
    SOKOL_ASSERT(pip);
    memset(pip, 0, sizeof(_sg_pipeline));
}
/* one render-pass attachment: image plus the mip level / slice to render into */
typedef struct {
    _sg_image* image;
    sg_image image_id;
    int mip_level;
    int slice;
} _sg_attachment;
typedef struct {
    _sg_slot slot;
    int num_color_atts;
    _sg_attachment color_atts[SG_MAX_COLOR_ATTACHMENTS];
    _sg_attachment ds_att;  /* depth-stencil attachment (optional) */
} _sg_pass;
/* reset a pass struct to its zero/initial state */
_SOKOL_PRIVATE void _sg_init_pass(_sg_pass* pass) {
    SOKOL_ASSERT(pass);
    memset(pass, 0, sizeof(_sg_pass));
}
/*-- a simple state cache for the resource bindings --------------------------*/
/* each cached binding keeps both the pointer (for fast compare) and the
   public id (to detect stale pointers after resource destruction) */
static const _sg_pipeline* _sg_mtl_cur_pipeline;
static sg_pipeline _sg_mtl_cur_pipeline_id;
static const _sg_buffer* _sg_mtl_cur_indexbuffer;
static sg_buffer _sg_mtl_cur_indexbuffer_id;
static const _sg_buffer* _sg_mtl_cur_vertexbuffers[SG_MAX_SHADERSTAGE_BUFFERS];
static sg_buffer _sg_mtl_cur_vertexbuffer_ids[SG_MAX_SHADERSTAGE_BUFFERS];
static const _sg_image* _sg_mtl_cur_vs_images[SG_MAX_SHADERSTAGE_IMAGES];
static sg_image _sg_mtl_cur_vs_image_ids[SG_MAX_SHADERSTAGE_IMAGES];
static const _sg_image* _sg_mtl_cur_fs_images[SG_MAX_SHADERSTAGE_IMAGES];
static sg_image _sg_mtl_cur_fs_image_ids[SG_MAX_SHADERSTAGE_IMAGES];
  5583. _SOKOL_PRIVATE void _sg_mtl_clear_state_cache() {
  5584. _sg_mtl_cur_pipeline = 0;
  5585. _sg_mtl_cur_pipeline_id.id = SG_INVALID_ID;
  5586. _sg_mtl_cur_indexbuffer = 0;
  5587. _sg_mtl_cur_indexbuffer_id.id = SG_INVALID_ID;
  5588. for (int i = 0; i < SG_MAX_SHADERSTAGE_BUFFERS; i++) {
  5589. _sg_mtl_cur_vertexbuffers[i] = 0;
  5590. _sg_mtl_cur_vertexbuffer_ids[i].id = SG_INVALID_ID;
  5591. }
  5592. for (int i = 0; i < SG_MAX_SHADERSTAGE_IMAGES; i++) {
  5593. _sg_mtl_cur_vs_images[i] = 0;
  5594. _sg_mtl_cur_vs_image_ids[i].id = SG_INVALID_ID;
  5595. _sg_mtl_cur_fs_images[i] = 0;
  5596. _sg_mtl_cur_fs_image_ids[i].id = SG_INVALID_ID;
  5597. }
  5598. }
/*-- main Metal backend state and functions ----------------------------------*/
static bool _sg_mtl_valid;
/* user-provided callbacks returning the frame's MTLRenderPassDescriptor / CAMetalDrawable */
static const void*(*_sg_mtl_renderpass_descriptor_cb)(void);
static const void*(*_sg_mtl_drawable_cb)(void);
static id<MTLDevice> _sg_mtl_device;
static id<MTLCommandQueue> _sg_mtl_cmd_queue;
static id<MTLCommandBuffer> _sg_mtl_cmd_buffer;
static id<MTLRenderCommandEncoder> _sg_mtl_cmd_encoder;
static uint32_t _sg_mtl_frame_index;            /* monotonically increasing frame counter */
static uint32_t _sg_mtl_cur_frame_rotate_index; /* which of the in-flight uniform buffers is active */
static uint32_t _sg_mtl_ub_size;                /* size of one global uniform buffer in bytes */
static uint32_t _sg_mtl_cur_ub_offset;          /* current write offset into the active uniform buffer */
static uint8_t* _sg_mtl_cur_ub_base_ptr;        /* CPU base pointer of the active uniform buffer */
static id<MTLBuffer> _sg_mtl_uniform_buffers[SG_NUM_INFLIGHT_FRAMES];
static dispatch_semaphore_t _sg_mtl_sem;        /* throttles CPU to SG_NUM_INFLIGHT_FRAMES frames ahead */
static bool _sg_mtl_in_pass;
static bool _sg_mtl_pass_valid;
static int _sg_mtl_cur_width;
static int _sg_mtl_cur_height;
/* one-time Metal backend initialization; desc must provide the MTLDevice
   and the renderpass-descriptor/drawable callbacks */
_SOKOL_PRIVATE void _sg_setup_backend(const sg_desc* desc) {
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(desc->mtl_device);
    SOKOL_ASSERT(desc->mtl_renderpass_descriptor_cb);
    SOKOL_ASSERT(desc->mtl_drawable_cb);
    _sg_mtl_init_pool(desc);
    _sg_mtl_init_sampler_cache(desc);
    _sg_mtl_clear_state_cache();
    _sg_mtl_valid = true;
    _sg_mtl_renderpass_descriptor_cb = desc->mtl_renderpass_descriptor_cb;
    _sg_mtl_drawable_cb = desc->mtl_drawable_cb;
    _sg_mtl_in_pass = false;
    _sg_mtl_pass_valid = false;
    _sg_mtl_cur_width = 0;
    _sg_mtl_cur_height = 0;
    /* frame index starts at 1 (0 is used as 'never updated' marker) */
    _sg_mtl_frame_index = 1;
    _sg_mtl_cur_frame_rotate_index = 0;
    _sg_mtl_cur_ub_offset = 0;
    _sg_mtl_cur_ub_base_ptr = 0;
    _sg_mtl_device = (__bridge id<MTLDevice>) desc->mtl_device;
    /* semaphore limits CPU to SG_NUM_INFLIGHT_FRAMES frames ahead of GPU */
    _sg_mtl_sem = dispatch_semaphore_create(SG_NUM_INFLIGHT_FRAMES);
    _sg_mtl_cmd_queue = [_sg_mtl_device newCommandQueue];
    _sg_mtl_ub_size = _sg_def(desc->mtl_global_uniform_buffer_size, _SG_MTL_DEFAULT_UB_SIZE);
    /* the uniform buffers are CPU-write-only; on macOS they additionally
       need managed storage mode (explicit didModifyRange flushes) */
    MTLResourceOptions res_opts = MTLResourceCPUCacheModeWriteCombined;
    #if defined(SOKOL_METAL_MACOS)
    res_opts |= MTLResourceStorageModeManaged;
    #endif
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _sg_mtl_uniform_buffers[i] = [_sg_mtl_device
            newBufferWithLength:_sg_mtl_ub_size
            options:res_opts
        ];
    }
}
/* shut down the Metal backend: drain the GPU, release all deferred
   resources, then drop all ObjC references (ARC frees them) */
_SOKOL_PRIVATE void _sg_discard_backend() {
    SOKOL_ASSERT(_sg_mtl_valid);
    /* wait for the last frame to finish */
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        dispatch_semaphore_wait(_sg_mtl_sem, DISPATCH_TIME_FOREVER);
    }
    _sg_mtl_destroy_sampler_cache(_sg_mtl_frame_index);
    /* pass a far-future frame index so every queued item is released now */
    _sg_mtl_garbage_collect(_sg_mtl_frame_index + SG_NUM_INFLIGHT_FRAMES + 2);
    _sg_mtl_destroy_pool();
    _sg_mtl_valid = false;
    _sg_mtl_cmd_encoder = nil;
    _sg_mtl_cmd_buffer = nil;
    _sg_mtl_cmd_queue = nil;
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _sg_mtl_uniform_buffers[i] = nil;
    }
    _sg_mtl_device = nil;
}
/* return whether an optional feature is supported by this Metal backend;
   compressed-texture support differs between macOS (DXT) and iOS (PVRTC/ETC2) */
_SOKOL_PRIVATE bool _sg_query_feature(sg_feature f) {
    switch (f) {
        case SG_FEATURE_INSTANCING:
        #if defined(SOKOL_METAL_MACOS)
        case SG_FEATURE_TEXTURE_COMPRESSION_DXT:
        #else
        case SG_FEATURE_TEXTURE_COMPRESSION_PVRTC:
        case SG_FEATURE_TEXTURE_COMPRESSION_ETC2:
        #endif
        case SG_FEATURE_TEXTURE_FLOAT:
        case SG_FEATURE_ORIGIN_TOP_LEFT:
        case SG_FEATURE_MSAA_RENDER_TARGETS:
        case SG_FEATURE_PACKED_VERTEX_FORMAT_10_2:
        case SG_FEATURE_MULTIPLE_RENDER_TARGET:
        case SG_FEATURE_IMAGETYPE_3D:
        case SG_FEATURE_IMAGETYPE_ARRAY:
            return true;
        default:
            return false;
    }
}
/* create a buffer resource: either wraps externally injected MTLBuffers
   (desc->mtl_buffers) or allocates new ones; immutable buffers get a
   single slot, mutable ones one slot per in-flight frame */
_SOKOL_PRIVATE void _sg_create_buffer(_sg_buffer* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_ALLOC);
    buf->size = desc->size;
    buf->type = _sg_def(desc->type, SG_BUFFERTYPE_VERTEXBUFFER);
    buf->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    buf->upd_frame_index = 0;
    buf->num_slots = (buf->usage == SG_USAGE_IMMUTABLE) ? 1 : SG_NUM_INFLIGHT_FRAMES;
    buf->active_slot = 0;
    /* a non-null first entry means the caller injected existing MTLBuffers */
    const bool injected = (0 != desc->mtl_buffers[0]);
    MTLResourceOptions mtl_options = _sg_mtl_buffer_resource_options(buf->usage);
    for (int slot = 0; slot < buf->num_slots; slot++) {
        id<MTLBuffer> mtl_buf;
        if (injected) {
            SOKOL_ASSERT(desc->mtl_buffers[slot]);
            mtl_buf = (__bridge id<MTLBuffer>) desc->mtl_buffers[slot];
        }
        else {
            if (buf->usage == SG_USAGE_IMMUTABLE) {
                /* immutable buffers are created with their content in one step */
                SOKOL_ASSERT(desc->content);
                mtl_buf = [_sg_mtl_device newBufferWithBytes:desc->content length:buf->size options:mtl_options];
            }
            else {
                mtl_buf = [_sg_mtl_device newBufferWithLength:buf->size options:mtl_options];
            }
        }
        buf->mtl_buf[slot] = _sg_mtl_add_resource(mtl_buf);
    }
    buf->slot.state = SG_RESOURCESTATE_VALID;
}
  5721. _SOKOL_PRIVATE void _sg_destroy_buffer(_sg_buffer* buf) {
  5722. SOKOL_ASSERT(buf);
  5723. if (buf->slot.state == SG_RESOURCESTATE_VALID) {
  5724. for (int slot = 0; slot < buf->num_slots; slot++) {
  5725. _sg_mtl_release_resource(_sg_mtl_frame_index, buf->mtl_buf[slot]);
  5726. }
  5727. }
  5728. _sg_init_buffer(buf);
  5729. }
/* upload CPU-side image content into an MTLTexture, covering all faces
   (cube maps), mip levels, and slices (array images);
   NOTE: content layout assumptions (slices packed per mip inside one
   subimage) mirror what the asserts below check */
_SOKOL_PRIVATE void _sg_mtl_copy_image_content(const _sg_image* img, __unsafe_unretained id<MTLTexture> mtl_tex, const sg_image_content* content) {
    const int num_faces = (img->type == SG_IMAGETYPE_CUBE) ? 6:1;
    const int num_slices = (img->type == SG_IMAGETYPE_ARRAY) ? img->depth : 1;
    for (int face_index = 0; face_index < num_faces; face_index++) {
        for (int mip_index = 0; mip_index < img->num_mipmaps; mip_index++) {
            SOKOL_ASSERT(content->subimage[face_index][mip_index].ptr);
            SOKOL_ASSERT(content->subimage[face_index][mip_index].size > 0);
            const uint8_t* data_ptr = (const uint8_t*)content->subimage[face_index][mip_index].ptr;
            /* mip dimensions halve per level but never drop below 1 */
            const int mip_width = _sg_max(img->width >> mip_index, 1);
            const int mip_height = _sg_max(img->height >> mip_index, 1);
            /* special case PVRTC formats: bytePerRow must be 0 */
            int bytes_per_row = 0;
            int bytes_per_slice = _sg_surface_pitch(img->pixel_format, mip_width, mip_height);
            if (!_sg_mtl_is_pvrtc(img->pixel_format)) {
                bytes_per_row = _sg_row_pitch(img->pixel_format, mip_width);
            }
            MTLRegion region;
            if (img->type == SG_IMAGETYPE_3D) {
                const int mip_depth = _sg_max(img->depth >> mip_index, 1);
                region = MTLRegionMake3D(0, 0, 0, mip_width, mip_height, mip_depth);
                /* FIXME: apparently the minimal bytes_per_image size for 3D texture
                   is 4 KByte... somehow need to handle this */
            }
            else {
                region = MTLRegionMake2D(0, 0, mip_width, mip_height);
            }
            for (int slice_index = 0; slice_index < num_slices; slice_index++) {
                /* for cube maps the Metal slice is the face index */
                const int mtl_slice_index = (img->type == SG_IMAGETYPE_CUBE) ? face_index : slice_index;
                const int slice_offset = slice_index * bytes_per_slice;
                SOKOL_ASSERT((slice_offset + bytes_per_slice) <= (int)content->subimage[face_index][mip_index].size);
                [mtl_tex replaceRegion:region
                    mipmapLevel:mip_index
                    slice:mtl_slice_index
                    withBytes:data_ptr + slice_offset
                    bytesPerRow:bytes_per_row
                    bytesPerImage:bytes_per_slice];
            }
        }
    }
}
  5770. _SOKOL_PRIVATE void _sg_create_image(_sg_image* img, const sg_image_desc* desc) {
  5771. SOKOL_ASSERT(img && desc);
  5772. SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_ALLOC);
  5773. img->type = _sg_def(desc->type, SG_IMAGETYPE_2D);
  5774. img->render_target = desc->render_target;
  5775. img->width = desc->width;
  5776. img->height = desc->height;
  5777. img->depth = _sg_def(desc->depth, 1);
  5778. img->num_mipmaps = _sg_def(desc->num_mipmaps, 1);
  5779. img->usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
  5780. img->pixel_format = _sg_def(desc->pixel_format, SG_PIXELFORMAT_RGBA8);
  5781. img->sample_count = _sg_def(desc->sample_count, 1);
  5782. img->min_filter = _sg_def(desc->min_filter, SG_FILTER_NEAREST);
  5783. img->mag_filter = _sg_def(desc->mag_filter, SG_FILTER_NEAREST);
  5784. img->wrap_u = _sg_def(desc->wrap_u, SG_WRAP_REPEAT);
  5785. img->wrap_v = _sg_def(desc->wrap_v, SG_WRAP_REPEAT);
  5786. img->wrap_w = _sg_def(desc->wrap_w, SG_WRAP_REPEAT);
  5787. img->max_anisotropy = _sg_def(desc->max_anisotropy, 1);
  5788. img->upd_frame_index = 0;
  5789. img->num_slots = (img->usage == SG_USAGE_IMMUTABLE) ? 1 :SG_NUM_INFLIGHT_FRAMES;
  5790. img->active_slot = 0;
  5791. const bool injected = (0 != desc->mtl_textures[0]);
  5792. /* first initialize all Metal resource pool slots to 'empty' */
  5793. for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
  5794. img->mtl_tex[i] = _sg_mtl_add_resource(nil);
  5795. }
  5796. img->mtl_sampler_state = _sg_mtl_add_resource(nil);
  5797. img->mtl_depth_tex = _sg_mtl_add_resource(nil);
  5798. img->mtl_msaa_tex = _sg_mtl_add_resource(nil);
  5799. /* initialize a Metal texture descriptor with common attributes */
  5800. MTLTextureDescriptor* mtl_desc = [[MTLTextureDescriptor alloc] init];
  5801. mtl_desc.textureType = _sg_mtl_texture_type(img->type);
  5802. if (img->render_target) {
  5803. if (_sg_is_valid_rendertarget_color_format(img->pixel_format)) {
  5804. mtl_desc.pixelFormat = _sg_mtl_rendertarget_color_format(img->pixel_format);
  5805. }
  5806. else {
  5807. mtl_desc.pixelFormat = _sg_mtl_rendertarget_depth_format(img->pixel_format);
  5808. }
  5809. }
  5810. else {
  5811. mtl_desc.pixelFormat = _sg_mtl_texture_format(img->pixel_format);
  5812. }
  5813. if (MTLPixelFormatInvalid == mtl_desc.pixelFormat) {
  5814. SOKOL_LOG("Unsupported texture pixel format!\n");
  5815. img->slot.state = SG_RESOURCESTATE_FAILED;
  5816. return;
  5817. }
  5818. mtl_desc.width = img->width;
  5819. mtl_desc.height = img->height;
  5820. if (SG_IMAGETYPE_3D == img->type) {
  5821. mtl_desc.depth = img->depth;
  5822. }
  5823. else {
  5824. mtl_desc.depth = 1;
  5825. }
  5826. mtl_desc.mipmapLevelCount = img->num_mipmaps;
  5827. if (SG_IMAGETYPE_ARRAY == img->type) {
  5828. mtl_desc.arrayLength = img->depth;
  5829. }
  5830. else {
  5831. mtl_desc.arrayLength = 1;
  5832. }
  5833. if (img->render_target) {
  5834. mtl_desc.resourceOptions = MTLResourceStorageModePrivate;
  5835. mtl_desc.cpuCacheMode = MTLCPUCacheModeDefaultCache;
  5836. mtl_desc.storageMode = MTLStorageModePrivate;
  5837. mtl_desc.usage |= MTLTextureUsageRenderTarget;
  5838. }
  5839. /* special case depth-stencil-buffer? */
  5840. if (_sg_is_valid_rendertarget_depth_format(img->pixel_format)) {
  5841. /* create only a depth texture */
  5842. SOKOL_ASSERT(img->render_target);
  5843. SOKOL_ASSERT(img->type == SG_IMAGETYPE_2D);
  5844. SOKOL_ASSERT(img->num_mipmaps == 1);
  5845. SOKOL_ASSERT(!injected);
  5846. if (img->sample_count > 1) {
  5847. mtl_desc.textureType = MTLTextureType2DMultisample;
  5848. mtl_desc.sampleCount = img->sample_count;
  5849. }
  5850. id<MTLTexture> tex = [_sg_mtl_device newTextureWithDescriptor:mtl_desc];
  5851. SOKOL_ASSERT(nil != tex);
  5852. img->mtl_depth_tex = _sg_mtl_add_resource(tex);
  5853. }
  5854. else {
  5855. /* create the color texture(s) */
  5856. for (int slot = 0; slot < img->num_slots; slot++) {
  5857. id<MTLTexture> tex;
  5858. if (injected) {
  5859. SOKOL_ASSERT(desc->mtl_textures[slot]);
  5860. tex = (__bridge id<MTLTexture>) desc->mtl_textures[slot];
  5861. }
  5862. else {
  5863. tex = [_sg_mtl_device newTextureWithDescriptor:mtl_desc];
  5864. if ((img->usage == SG_USAGE_IMMUTABLE) && !img->render_target) {
  5865. _sg_mtl_copy_image_content(img, tex, &desc->content);
  5866. }
  5867. }
  5868. img->mtl_tex[slot] = _sg_mtl_add_resource(tex);
  5869. }
  5870. /* if MSAA color render target, create an additional MSAA render-surface texture */
  5871. if (img->render_target && (img->sample_count > 1)) {
  5872. mtl_desc.textureType = MTLTextureType2DMultisample;
  5873. mtl_desc.depth = 1;
  5874. mtl_desc.arrayLength = 1;
  5875. mtl_desc.mipmapLevelCount = 1;
  5876. mtl_desc.sampleCount = img->sample_count;
  5877. id<MTLTexture> tex = [_sg_mtl_device newTextureWithDescriptor:mtl_desc];
  5878. img->mtl_msaa_tex = _sg_mtl_add_resource(tex);
  5879. }
  5880. /* create (possibly shared) sampler state */
  5881. img->mtl_sampler_state = _sg_mtl_create_sampler(_sg_mtl_device, desc);
  5882. }
  5883. img->slot.state = SG_RESOURCESTATE_VALID;
  5884. }
  5885. _SOKOL_PRIVATE void _sg_destroy_image(_sg_image* img) {
  5886. SOKOL_ASSERT(img);
  5887. if (img->slot.state == SG_RESOURCESTATE_VALID) {
  5888. for (int slot = 0; slot < img->num_slots; slot++) {
  5889. _sg_mtl_release_resource(_sg_mtl_frame_index, img->mtl_tex[slot]);
  5890. }
  5891. _sg_mtl_release_resource(_sg_mtl_frame_index, img->mtl_depth_tex);
  5892. _sg_mtl_release_resource(_sg_mtl_frame_index, img->mtl_msaa_tex);
  5893. /* NOTE: sampler state objects are shared and not released until shutdown */
  5894. }
  5895. _sg_init_image(img);
  5896. }
  5897. _SOKOL_PRIVATE id<MTLLibrary> _sg_mtl_compile_library(const char* src) {
  5898. NSError* err = NULL;
  5899. id<MTLLibrary> lib = [_sg_mtl_device
  5900. newLibraryWithSource:[NSString stringWithUTF8String:src]
  5901. options:nil
  5902. error:&err
  5903. ];
  5904. if (err) {
  5905. SOKOL_LOG([err.localizedDescription UTF8String]);
  5906. }
  5907. return lib;
  5908. }
  5909. _SOKOL_PRIVATE id<MTLLibrary> _sg_mtl_library_from_bytecode(const uint8_t* ptr, int num_bytes) {
  5910. NSError* err = NULL;
  5911. dispatch_data_t lib_data = dispatch_data_create(ptr, num_bytes, NULL, DISPATCH_DATA_DESTRUCTOR_DEFAULT);
  5912. id<MTLLibrary> lib = [_sg_mtl_device newLibraryWithData:lib_data error:&err];
  5913. if (err) {
  5914. SOKOL_LOG([err.localizedDescription UTF8String]);
  5915. }
  5916. return lib;
  5917. }
/* create a shader object from MSL source code or precompiled Metal bytecode,
   and record the per-stage uniform-block sizes and image types which are
   needed later by uniform updates and draw-state application;
   on any failure the shader is put into SG_RESOURCESTATE_FAILED */
_SOKOL_PRIVATE void _sg_create_shader(_sg_shader* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_ALLOC);
    /* uniform block sizes and image types */
    for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
        const sg_shader_stage_desc* stage_desc = (stage_index == SG_SHADERSTAGE_VS) ? &desc->vs : &desc->fs;
        _sg_shader_stage* stage = &shd->stage[stage_index];
        SOKOL_ASSERT(stage->num_uniform_blocks == 0);
        for (int ub_index = 0; ub_index < SG_MAX_SHADERSTAGE_UBS; ub_index++) {
            const sg_shader_uniform_block_desc* ub_desc = &stage_desc->uniform_blocks[ub_index];
            /* uniform blocks are expected front-to-back, first gap ends the scan */
            if (0 == ub_desc->size) {
                break;
            }
            _sg_uniform_block* ub = &stage->uniform_blocks[ub_index];
            ub->size = ub_desc->size;
            stage->num_uniform_blocks++;
        }
        SOKOL_ASSERT(stage->num_images == 0);
        for (int img_index = 0; img_index < SG_MAX_SHADERSTAGE_IMAGES; img_index++) {
            const sg_shader_image_desc* img_desc = &stage_desc->images[img_index];
            /* image slots are expected front-to-back, first default-typed slot ends the scan */
            if (img_desc->type == _SG_IMAGETYPE_DEFAULT) {
                break;
            }
            stage->images[img_index].type = img_desc->type;
            stage->num_images++;
        }
    }
    /* create metal library objects and lookup entry functions */
    id<MTLLibrary> vs_lib;
    id<MTLLibrary> fs_lib;
    id<MTLFunction> vs_func;
    id<MTLFunction> fs_func;
    const char* vs_entry = _sg_def(desc->vs.entry, "_main");
    const char* fs_entry = _sg_def(desc->fs.entry, "_main");
    if (desc->vs.byte_code && desc->fs.byte_code) {
        /* separate byte code provided */
        vs_lib = _sg_mtl_library_from_bytecode(desc->vs.byte_code, desc->vs.byte_code_size);
        fs_lib = _sg_mtl_library_from_bytecode(desc->fs.byte_code, desc->fs.byte_code_size);
        if (nil == vs_lib || nil == fs_lib) {
            shd->slot.state = SG_RESOURCESTATE_FAILED;
            return;
        }
        vs_func = [vs_lib newFunctionWithName:[NSString stringWithUTF8String:vs_entry]];
        fs_func = [fs_lib newFunctionWithName:[NSString stringWithUTF8String:fs_entry]];
    }
    else if (desc->vs.source && desc->fs.source) {
        /* separate sources provided */
        vs_lib = _sg_mtl_compile_library(desc->vs.source);
        fs_lib = _sg_mtl_compile_library(desc->fs.source);
        if (nil == vs_lib || nil == fs_lib) {
            shd->slot.state = SG_RESOURCESTATE_FAILED;
            return;
        }
        vs_func = [vs_lib newFunctionWithName:[NSString stringWithUTF8String:vs_entry]];
        fs_func = [fs_lib newFunctionWithName:[NSString stringWithUTF8String:fs_entry]];
    }
    else {
        /* neither byte code nor source provided for both stages */
        shd->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    if (nil == vs_func) {
        SOKOL_LOG("vertex shader entry function not found\n");
        shd->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    if (nil == fs_func) {
        SOKOL_LOG("fragment shader entry function not found\n");
        shd->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    /* it is legal to call _sg_mtl_add_resource with a nil value, this will return a special 0xFFFFFFFF index */
    shd->stage[SG_SHADERSTAGE_VS].mtl_lib  = _sg_mtl_add_resource(vs_lib);
    shd->stage[SG_SHADERSTAGE_FS].mtl_lib  = _sg_mtl_add_resource(fs_lib);
    shd->stage[SG_SHADERSTAGE_VS].mtl_func = _sg_mtl_add_resource(vs_func);
    shd->stage[SG_SHADERSTAGE_FS].mtl_func = _sg_mtl_add_resource(fs_func);
    shd->slot.state = SG_RESOURCESTATE_VALID;
}
  5995. _SOKOL_PRIVATE void _sg_destroy_shader(_sg_shader* shd) {
  5996. SOKOL_ASSERT(shd);
  5997. if (shd->slot.state == SG_RESOURCESTATE_VALID) {
  5998. /* it is valid to call _sg_mtl_release_resource with the special 0xFFFFFFFF index */
  5999. _sg_mtl_release_resource(_sg_mtl_frame_index, shd->stage[SG_SHADERSTAGE_VS].mtl_func);
  6000. _sg_mtl_release_resource(_sg_mtl_frame_index, shd->stage[SG_SHADERSTAGE_VS].mtl_lib);
  6001. _sg_mtl_release_resource(_sg_mtl_frame_index, shd->stage[SG_SHADERSTAGE_FS].mtl_func);
  6002. _sg_mtl_release_resource(_sg_mtl_frame_index, shd->stage[SG_SHADERSTAGE_FS].mtl_lib);
  6003. }
  6004. _sg_init_shader(shd);
  6005. }
/* create a pipeline object: resolve desc defaults, build a Metal vertex
   descriptor (with optional auto-computed attribute offsets), a render-
   pipeline state and a depth-stencil state; on failure the pipeline is
   put into SG_RESOURCESTATE_FAILED */
_SOKOL_PRIVATE void _sg_create_pipeline(_sg_pipeline* pip, _sg_shader* shd, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && shd && desc);
    SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_ALLOC);
    SOKOL_ASSERT(desc->shader.id == shd->slot.id);
    SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_VALID);
    pip->shader = shd;
    pip->shader_id = desc->shader;
    /* resolve zero-initialized desc values to their defaults */
    pip->color_attachment_count = _sg_def(desc->blend.color_attachment_count, 1);
    pip->color_format = _sg_def(desc->blend.color_format, SG_PIXELFORMAT_RGBA8);
    pip->depth_format = _sg_def(desc->blend.depth_format, SG_PIXELFORMAT_DEPTHSTENCIL);
    pip->sample_count = _sg_def(desc->rasterizer.sample_count, 1);
    pip->depth_bias = desc->rasterizer.depth_bias;
    pip->depth_bias_slope_scale = desc->rasterizer.depth_bias_slope_scale;
    pip->depth_bias_clamp = desc->rasterizer.depth_bias_clamp;
    sg_primitive_type prim_type = _sg_def(desc->primitive_type, SG_PRIMITIVETYPE_TRIANGLES);
    pip->mtl_prim_type = _sg_mtl_primitive_type(prim_type);
    pip->index_type = _sg_def(desc->index_type, SG_INDEXTYPE_NONE);
    pip->mtl_index_size = _sg_mtl_index_size(pip->index_type);
    if (SG_INDEXTYPE_NONE != pip->index_type) {
        pip->mtl_index_type = _sg_mtl_index_type(pip->index_type);
    }
    pip->mtl_cull_mode = _sg_mtl_cull_mode(_sg_def(desc->rasterizer.cull_mode, SG_CULLMODE_NONE));
    pip->mtl_winding = _sg_mtl_winding(_sg_def(desc->rasterizer.face_winding, SG_FACEWINDING_CW));
    pip->mtl_stencil_ref = desc->depth_stencil.stencil_ref;
    for (int i = 0; i < 4; i++) {
        pip->blend_color[i] = desc->blend.blend_color[i];
    }
    /* create vertex-descriptor */
    MTLVertexDescriptor* vtx_desc = [MTLVertexDescriptor vertexDescriptor];
    /* running byte offset per vertex-buffer slot, used when offsets are auto-computed */
    int auto_offset[SG_MAX_SHADERSTAGE_BUFFERS];
    for (int layout_index = 0; layout_index < SG_MAX_SHADERSTAGE_BUFFERS; layout_index++) {
        auto_offset[layout_index] = 0;
    }
    /* to use computed offsets, *all* attr offsets must be 0 */
    bool use_auto_offset = true;
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        if (desc->layout.attrs[attr_index].offset != 0) {
            use_auto_offset = false;
            break;
        }
    }
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        const sg_vertex_attr_desc* a_desc = &desc->layout.attrs[attr_index];
        /* attributes are expected front-to-back, first invalid format ends the scan */
        if (a_desc->format == SG_VERTEXFORMAT_INVALID) {
            break;
        }
        SOKOL_ASSERT((a_desc->buffer_index >= 0) && (a_desc->buffer_index < SG_MAX_SHADERSTAGE_BUFFERS));
        vtx_desc.attributes[attr_index].format = _sg_mtl_vertex_format(a_desc->format);
        vtx_desc.attributes[attr_index].offset = use_auto_offset ? auto_offset[a_desc->buffer_index] : a_desc->offset;
        /* vertex buffers are bound after the uniform-buffer slots */
        vtx_desc.attributes[attr_index].bufferIndex = a_desc->buffer_index + SG_MAX_SHADERSTAGE_UBS;
        auto_offset[a_desc->buffer_index] += _sg_vertexformat_bytesize(a_desc->format);
        pip->vertex_layout_valid[a_desc->buffer_index] = true;
    }
    for (int layout_index = 0; layout_index < SG_MAX_SHADERSTAGE_BUFFERS; layout_index++) {
        if (pip->vertex_layout_valid[layout_index]) {
            const sg_buffer_layout_desc* l_desc = &desc->layout.buffers[layout_index];
            const int mtl_vb_slot = layout_index + SG_MAX_SHADERSTAGE_UBS;
            /* a zero stride means: use the accumulated attribute byte size */
            const int stride = l_desc->stride ? l_desc->stride : auto_offset[layout_index];
            SOKOL_ASSERT(stride > 0);
            vtx_desc.layouts[mtl_vb_slot].stride = stride;
            vtx_desc.layouts[mtl_vb_slot].stepFunction = _sg_mtl_step_function(_sg_def(l_desc->step_func, SG_VERTEXSTEP_PER_VERTEX));
            vtx_desc.layouts[mtl_vb_slot].stepRate = _sg_def(l_desc->step_rate, 1);
        }
    }
    /* render-pipeline descriptor */
    MTLRenderPipelineDescriptor* rp_desc = [[MTLRenderPipelineDescriptor alloc] init];
    rp_desc.vertexDescriptor = vtx_desc;
    SOKOL_ASSERT(shd->stage[SG_SHADERSTAGE_VS].mtl_func != _SG_MTL_INVALID_POOL_INDEX);
    rp_desc.vertexFunction = _sg_mtl_pool[shd->stage[SG_SHADERSTAGE_VS].mtl_func];
    SOKOL_ASSERT(shd->stage[SG_SHADERSTAGE_FS].mtl_func != _SG_MTL_INVALID_POOL_INDEX);
    rp_desc.fragmentFunction = _sg_mtl_pool[shd->stage[SG_SHADERSTAGE_FS].mtl_func];
    rp_desc.sampleCount = _sg_def(desc->rasterizer.sample_count, 1);
    rp_desc.alphaToCoverageEnabled = desc->rasterizer.alpha_to_coverage_enabled;
    rp_desc.alphaToOneEnabled = NO;
    rp_desc.rasterizationEnabled = YES;
    rp_desc.depthAttachmentPixelFormat = _sg_mtl_rendertarget_depth_format(_sg_def(desc->blend.depth_format, SG_PIXELFORMAT_DEPTHSTENCIL));
    rp_desc.stencilAttachmentPixelFormat = _sg_mtl_rendertarget_stencil_format(_sg_def(desc->blend.depth_format, SG_PIXELFORMAT_DEPTHSTENCIL));
    /* FIXME: this only works on macOS 10.13!
    for (int i = 0; i < (SG_MAX_SHADERSTAGE_UBS+SG_MAX_SHADERSTAGE_BUFFERS); i++) {
        rp_desc.vertexBuffers[i].mutability = MTLMutabilityImmutable;
    }
    for (int i = 0; i < SG_MAX_SHADERSTAGE_UBS; i++) {
        rp_desc.fragmentBuffers[i].mutability = MTLMutabilityImmutable;
    }
    */
    /* all color attachments share the same pixel format and blend state */
    const int att_count = _sg_def(desc->blend.color_attachment_count, 1);
    for (int i = 0; i < att_count; i++) {
        rp_desc.colorAttachments[i].pixelFormat = _sg_mtl_rendertarget_color_format(_sg_def(desc->blend.color_format, SG_PIXELFORMAT_RGBA8));
        rp_desc.colorAttachments[i].writeMask = _sg_mtl_color_write_mask((sg_color_mask)_sg_def(desc->blend.color_write_mask, SG_COLORMASK_RGBA));
        rp_desc.colorAttachments[i].blendingEnabled = desc->blend.enabled;
        rp_desc.colorAttachments[i].alphaBlendOperation = _sg_mtl_blend_op(_sg_def(desc->blend.op_alpha, SG_BLENDOP_ADD));
        rp_desc.colorAttachments[i].rgbBlendOperation = _sg_mtl_blend_op(_sg_def(desc->blend.op_rgb, SG_BLENDOP_ADD));
        rp_desc.colorAttachments[i].destinationAlphaBlendFactor = _sg_mtl_blend_factor(_sg_def(desc->blend.dst_factor_alpha, SG_BLENDFACTOR_ZERO));
        rp_desc.colorAttachments[i].destinationRGBBlendFactor = _sg_mtl_blend_factor(_sg_def(desc->blend.dst_factor_rgb, SG_BLENDFACTOR_ZERO));
        rp_desc.colorAttachments[i].sourceAlphaBlendFactor = _sg_mtl_blend_factor(_sg_def(desc->blend.src_factor_alpha, SG_BLENDFACTOR_ONE));
        rp_desc.colorAttachments[i].sourceRGBBlendFactor = _sg_mtl_blend_factor(_sg_def(desc->blend.src_factor_rgb, SG_BLENDFACTOR_ONE));
    }
    NSError* err = NULL;
    id<MTLRenderPipelineState> mtl_rps = [_sg_mtl_device newRenderPipelineStateWithDescriptor:rp_desc error:&err];
    if (nil == mtl_rps) {
        SOKOL_ASSERT(err);
        SOKOL_LOG([err.localizedDescription UTF8String]);
        pip->slot.state = SG_RESOURCESTATE_FAILED;
        return;
    }
    /* depth-stencil-state */
    MTLDepthStencilDescriptor* ds_desc = [[MTLDepthStencilDescriptor alloc] init];
    ds_desc.depthCompareFunction = _sg_mtl_compare_func(_sg_def(desc->depth_stencil.depth_compare_func, SG_COMPAREFUNC_ALWAYS));
    ds_desc.depthWriteEnabled = desc->depth_stencil.depth_write_enabled;
    if (desc->depth_stencil.stencil_enabled) {
        const sg_stencil_state* sb = &desc->depth_stencil.stencil_back;
        ds_desc.backFaceStencil = [[MTLStencilDescriptor alloc] init];
        ds_desc.backFaceStencil.stencilFailureOperation = _sg_mtl_stencil_op(_sg_def(sb->fail_op, SG_STENCILOP_KEEP));
        ds_desc.backFaceStencil.depthFailureOperation = _sg_mtl_stencil_op(_sg_def(sb->depth_fail_op, SG_STENCILOP_KEEP));
        ds_desc.backFaceStencil.depthStencilPassOperation = _sg_mtl_stencil_op(_sg_def(sb->pass_op, SG_STENCILOP_KEEP));
        ds_desc.backFaceStencil.stencilCompareFunction = _sg_mtl_compare_func(_sg_def(sb->compare_func, SG_COMPAREFUNC_ALWAYS));
        ds_desc.backFaceStencil.readMask = desc->depth_stencil.stencil_read_mask;
        ds_desc.backFaceStencil.writeMask = desc->depth_stencil.stencil_write_mask;
        const sg_stencil_state* sf = &desc->depth_stencil.stencil_front;
        ds_desc.frontFaceStencil = [[MTLStencilDescriptor alloc] init];
        ds_desc.frontFaceStencil.stencilFailureOperation = _sg_mtl_stencil_op(_sg_def(sf->fail_op, SG_STENCILOP_KEEP));
        ds_desc.frontFaceStencil.depthFailureOperation = _sg_mtl_stencil_op(_sg_def(sf->depth_fail_op, SG_STENCILOP_KEEP));
        ds_desc.frontFaceStencil.depthStencilPassOperation = _sg_mtl_stencil_op(_sg_def(sf->pass_op, SG_STENCILOP_KEEP));
        ds_desc.frontFaceStencil.stencilCompareFunction = _sg_mtl_compare_func(_sg_def(sf->compare_func, SG_COMPAREFUNC_ALWAYS));
        ds_desc.frontFaceStencil.readMask = desc->depth_stencil.stencil_read_mask;
        ds_desc.frontFaceStencil.writeMask = desc->depth_stencil.stencil_write_mask;
    }
    id<MTLDepthStencilState> mtl_dss = [_sg_mtl_device newDepthStencilStateWithDescriptor:ds_desc];
    /* register the new Metal objects in the resource pool */
    pip->mtl_rps = _sg_mtl_add_resource(mtl_rps);
    pip->mtl_dss = _sg_mtl_add_resource(mtl_dss);
    pip->slot.state = SG_RESOURCESTATE_VALID;
}
  6138. _SOKOL_PRIVATE void _sg_destroy_pipeline(_sg_pipeline* pip) {
  6139. SOKOL_ASSERT(pip);
  6140. if (pip->slot.state == SG_RESOURCESTATE_VALID) {
  6141. _sg_mtl_release_resource(_sg_mtl_frame_index, pip->mtl_rps);
  6142. _sg_mtl_release_resource(_sg_mtl_frame_index, pip->mtl_dss);
  6143. }
  6144. _sg_init_pipeline(pip);
  6145. }
  6146. _SOKOL_PRIVATE void _sg_create_pass(_sg_pass* pass, _sg_image** att_images, const sg_pass_desc* desc) {
  6147. SOKOL_ASSERT(pass && desc);
  6148. SOKOL_ASSERT(pass->slot.state == SG_RESOURCESTATE_ALLOC);
  6149. SOKOL_ASSERT(att_images && att_images[0]);
  6150. /* copy image pointers and desc attributes */
  6151. const sg_attachment_desc* att_desc;
  6152. _sg_attachment* att;
  6153. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  6154. SOKOL_ASSERT(0 == pass->color_atts[i].image);
  6155. att_desc = &desc->color_attachments[i];
  6156. if (att_desc->image.id != SG_INVALID_ID) {
  6157. pass->num_color_atts++;
  6158. SOKOL_ASSERT(att_images[i] && (att_images[i]->slot.id == att_desc->image.id));
  6159. SOKOL_ASSERT(_sg_is_valid_rendertarget_color_format(att_images[i]->pixel_format));
  6160. att = &pass->color_atts[i];
  6161. SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
  6162. att->image = att_images[i];
  6163. att->image_id = att_desc->image;
  6164. att->mip_level = att_desc->mip_level;
  6165. att->slice = att_desc->slice;
  6166. }
  6167. }
  6168. SOKOL_ASSERT(0 == pass->ds_att.image);
  6169. att_desc = &desc->depth_stencil_attachment;
  6170. const int ds_img_index = SG_MAX_COLOR_ATTACHMENTS;
  6171. if (att_desc->image.id != SG_INVALID_ID) {
  6172. SOKOL_ASSERT(att_images[ds_img_index] && (att_images[ds_img_index]->slot.id == att_desc->image.id));
  6173. SOKOL_ASSERT(_sg_is_valid_rendertarget_depth_format(att_images[ds_img_index]->pixel_format));
  6174. att = &pass->ds_att;
  6175. SOKOL_ASSERT((att->image == 0) && (att->image_id.id == SG_INVALID_ID));
  6176. att->image = att_images[ds_img_index];
  6177. att->image_id = att_desc->image;
  6178. att->mip_level = att_desc->mip_level;
  6179. att->slice = att_desc->slice;
  6180. }
  6181. pass->slot.state = SG_RESOURCESTATE_VALID;
  6182. }
  6183. _SOKOL_PRIVATE void _sg_destroy_pass(_sg_pass* pass) {
  6184. SOKOL_ASSERT(pass);
  6185. _sg_init_pass(pass);
  6186. }
/* begin a render pass: lazily create the per-frame command buffer, build or
   obtain an MTLRenderPassDescriptor (offscreen pass vs user-provided default
   pass), create a render command encoder, and bind the global uniform buffer
   to all uniform-buffer slots of both shader stages;
   pass == 0 means 'default pass' rendering into the drawable surface */
_SOKOL_PRIVATE void _sg_begin_pass(_sg_pass* pass, const sg_pass_action* action, int w, int h) {
    SOKOL_ASSERT(action);
    SOKOL_ASSERT(!_sg_mtl_in_pass);
    SOKOL_ASSERT(_sg_mtl_cmd_queue);
    SOKOL_ASSERT(!_sg_mtl_cmd_encoder);
    SOKOL_ASSERT(_sg_mtl_renderpass_descriptor_cb);
    _sg_mtl_in_pass = true;
    _sg_mtl_cur_width = w;
    _sg_mtl_cur_height = h;
    _sg_mtl_clear_state_cache();
    /* if this is the first pass in the frame, create a command buffer */
    if (nil == _sg_mtl_cmd_buffer) {
        /* block until the oldest frame in flight has finished */
        dispatch_semaphore_wait(_sg_mtl_sem, DISPATCH_TIME_FOREVER);
        _sg_mtl_cmd_buffer = [_sg_mtl_cmd_queue commandBufferWithUnretainedReferences];
    }
    /* if this is first pass in frame, get uniform buffer base pointer */
    if (0 == _sg_mtl_cur_ub_base_ptr) {
        _sg_mtl_cur_ub_base_ptr = (uint8_t*)[_sg_mtl_uniform_buffers[_sg_mtl_cur_frame_rotate_index] contents];
    }
    /* initialize a render pass descriptor */
    MTLRenderPassDescriptor* pass_desc = nil;
    if (pass) {
        /* offscreen render pass */
        pass_desc = [MTLRenderPassDescriptor renderPassDescriptor];
    }
    else {
        /* default render pass, call user-provided callback to provide render pass descriptor */
        pass_desc = (__bridge MTLRenderPassDescriptor*) _sg_mtl_renderpass_descriptor_cb();
    }
    if (pass_desc) {
        _sg_mtl_pass_valid = true;
    }
    else {
        /* default pass descriptor will not be valid if window is minimized,
           don't do any rendering in this case */
        _sg_mtl_pass_valid = false;
        return;
    }
    if (pass) {
        /* setup pass descriptor for offscreen rendering */
        SOKOL_ASSERT(pass->slot.state == SG_RESOURCESTATE_VALID);
        for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
            const _sg_attachment* att = &pass->color_atts[i];
            /* attachments are expected front-to-back, first empty slot ends the scan */
            if (0 == att->image) {
                break;
            }
            SOKOL_ASSERT(att->image->slot.state == SG_RESOURCESTATE_VALID);
            SOKOL_ASSERT(att->image->slot.id == att->image_id.id);
            const bool is_msaa = (att->image->sample_count > 1);
            pass_desc.colorAttachments[i].loadAction = _sg_mtl_load_action(action->colors[i].action);
            /* MSAA surfaces are resolved into the attachment image at the end of the pass */
            pass_desc.colorAttachments[i].storeAction = is_msaa ? MTLStoreActionMultisampleResolve : MTLStoreActionStore;
            const float* c = &(action->colors[i].val[0]);
            pass_desc.colorAttachments[i].clearColor = MTLClearColorMake(c[0], c[1], c[2], c[3]);
            if (is_msaa) {
                /* render into the private MSAA surface, resolve into the image's texture */
                SOKOL_ASSERT(att->image->mtl_msaa_tex != _SG_MTL_INVALID_POOL_INDEX);
                SOKOL_ASSERT(att->image->mtl_tex[att->image->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
                pass_desc.colorAttachments[i].texture = _sg_mtl_pool[att->image->mtl_msaa_tex];
                pass_desc.colorAttachments[i].resolveTexture = _sg_mtl_pool[att->image->mtl_tex[att->image->active_slot]];
                pass_desc.colorAttachments[i].resolveLevel = att->mip_level;
                /* the meaning of 'slice' depends on the image type */
                switch (att->image->type) {
                    case SG_IMAGETYPE_CUBE:
                    case SG_IMAGETYPE_ARRAY:
                        pass_desc.colorAttachments[i].resolveSlice = att->slice;
                        break;
                    case SG_IMAGETYPE_3D:
                        pass_desc.colorAttachments[i].resolveDepthPlane = att->slice;
                        break;
                    default: break;
                }
            }
            else {
                SOKOL_ASSERT(att->image->mtl_tex[att->image->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
                pass_desc.colorAttachments[i].texture = _sg_mtl_pool[att->image->mtl_tex[att->image->active_slot]];
                pass_desc.colorAttachments[i].level = att->mip_level;
                switch (att->image->type) {
                    case SG_IMAGETYPE_CUBE:
                    case SG_IMAGETYPE_ARRAY:
                        pass_desc.colorAttachments[i].slice = att->slice;
                        break;
                    case SG_IMAGETYPE_3D:
                        pass_desc.colorAttachments[i].depthPlane = att->slice;
                        break;
                    default: break;
                }
            }
        }
        if (0 != pass->ds_att.image) {
            const _sg_attachment* att = &pass->ds_att;
            SOKOL_ASSERT(att->image->slot.state == SG_RESOURCESTATE_VALID);
            SOKOL_ASSERT(att->image->slot.id == att->image_id.id);
            SOKOL_ASSERT(att->image->mtl_depth_tex != _SG_MTL_INVALID_POOL_INDEX);
            pass_desc.depthAttachment.texture = _sg_mtl_pool[att->image->mtl_depth_tex];
            pass_desc.depthAttachment.loadAction = _sg_mtl_load_action(action->depth.action);
            pass_desc.depthAttachment.clearDepth = action->depth.val;
            /* combined depth-stencil formats use the same texture for both attachments */
            if (_sg_is_depth_stencil_format(att->image->pixel_format)) {
                pass_desc.stencilAttachment.texture = _sg_mtl_pool[att->image->mtl_depth_tex];
                pass_desc.stencilAttachment.loadAction = _sg_mtl_load_action(action->stencil.action);
                pass_desc.stencilAttachment.clearStencil = action->stencil.val;
            }
        }
    }
    else {
        /* setup pass descriptor for default rendering */
        pass_desc.colorAttachments[0].loadAction = _sg_mtl_load_action(action->colors[0].action);
        const float* c = &(action->colors[0].val[0]);
        pass_desc.colorAttachments[0].clearColor = MTLClearColorMake(c[0], c[1], c[2], c[3]);
        pass_desc.depthAttachment.loadAction = _sg_mtl_load_action(action->depth.action);
        pass_desc.depthAttachment.clearDepth = action->depth.val;
        pass_desc.stencilAttachment.loadAction = _sg_mtl_load_action(action->stencil.action);
        pass_desc.stencilAttachment.clearStencil = action->stencil.val;
    }
    /* create a render command encoder, this might return nil if window is minimized */
    _sg_mtl_cmd_encoder = [_sg_mtl_cmd_buffer renderCommandEncoderWithDescriptor:pass_desc];
    if (_sg_mtl_cmd_encoder == nil) {
        _sg_mtl_pass_valid = false;
        return;
    }
    /* bind the global uniform buffer, this only happens once per pass */
    for (int slot = 0; slot < SG_MAX_SHADERSTAGE_UBS; slot++) {
        [_sg_mtl_cmd_encoder
            setVertexBuffer:_sg_mtl_uniform_buffers[_sg_mtl_cur_frame_rotate_index]
            offset:0
            atIndex:slot];
        [_sg_mtl_cmd_encoder
            setFragmentBuffer:_sg_mtl_uniform_buffers[_sg_mtl_cur_frame_rotate_index]
            offset:0
            atIndex:slot];
    }
}
  6317. _SOKOL_PRIVATE void _sg_end_pass() {
  6318. SOKOL_ASSERT(_sg_mtl_in_pass);
  6319. _sg_mtl_in_pass = false;
  6320. _sg_mtl_pass_valid = false;
  6321. if (nil != _sg_mtl_cmd_encoder) {
  6322. [_sg_mtl_cmd_encoder endEncoding];
  6323. _sg_mtl_cmd_encoder = nil;
  6324. }
  6325. }
  6326. _SOKOL_PRIVATE void _sg_commit() {
  6327. SOKOL_ASSERT(!_sg_mtl_in_pass);
  6328. SOKOL_ASSERT(!_sg_mtl_pass_valid);
  6329. SOKOL_ASSERT(_sg_mtl_drawable_cb);
  6330. SOKOL_ASSERT(nil == _sg_mtl_cmd_encoder);
  6331. SOKOL_ASSERT(nil != _sg_mtl_cmd_buffer);
  6332. #if defined(SOKOL_METAL_MACOS)
  6333. [_sg_mtl_uniform_buffers[_sg_mtl_cur_frame_rotate_index] didModifyRange:NSMakeRange(0, _sg_mtl_cur_ub_offset)];
  6334. #endif
  6335. /* present, commit and signal semaphore when done */
  6336. id<MTLDrawable> cur_drawable = (__bridge id<MTLDrawable>) _sg_mtl_drawable_cb();
  6337. [_sg_mtl_cmd_buffer presentDrawable:cur_drawable];
  6338. __block dispatch_semaphore_t sem = _sg_mtl_sem;
  6339. [_sg_mtl_cmd_buffer addCompletedHandler:^(id<MTLCommandBuffer> cmd_buffer) {
  6340. dispatch_semaphore_signal(sem);
  6341. }];
  6342. [_sg_mtl_cmd_buffer commit];
  6343. /* garbage-collect resources pending for release */
  6344. _sg_mtl_garbage_collect(_sg_mtl_frame_index);
  6345. /* rotate uniform buffer slot */
  6346. if (++_sg_mtl_cur_frame_rotate_index >= SG_NUM_INFLIGHT_FRAMES) {
  6347. _sg_mtl_cur_frame_rotate_index = 0;
  6348. }
  6349. _sg_mtl_frame_index++;
  6350. _sg_mtl_cur_ub_offset = 0;
  6351. _sg_mtl_cur_ub_base_ptr = 0;
  6352. _sg_mtl_cmd_buffer = nil;
  6353. }
  6354. _SOKOL_PRIVATE void _sg_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
  6355. SOKOL_ASSERT(_sg_mtl_in_pass);
  6356. if (!_sg_mtl_pass_valid) {
  6357. return;
  6358. }
  6359. SOKOL_ASSERT(_sg_mtl_cmd_encoder);
  6360. MTLViewport vp;
  6361. vp.originX = (double) x;
  6362. vp.originY = (double) (origin_top_left ? y : (_sg_mtl_cur_height - (y + h)));
  6363. vp.width = (double) w;
  6364. vp.height = (double) h;
  6365. vp.znear = 0.0;
  6366. vp.zfar = 1.0;
  6367. [_sg_mtl_cmd_encoder setViewport:vp];
  6368. }
  6369. _SOKOL_PRIVATE void _sg_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
  6370. SOKOL_ASSERT(_sg_mtl_in_pass);
  6371. if (!_sg_mtl_pass_valid) {
  6372. return;
  6373. }
  6374. SOKOL_ASSERT(_sg_mtl_cmd_encoder);
  6375. /* clip against framebuffer rect */
  6376. x = _sg_min(_sg_max(0, x), _sg_mtl_cur_width-1);
  6377. y = _sg_min(_sg_max(0, y), _sg_mtl_cur_height-1);
  6378. if ((x + w) > _sg_mtl_cur_width) {
  6379. w = _sg_mtl_cur_width - x;
  6380. }
  6381. if ((y + h) > _sg_mtl_cur_height) {
  6382. h = _sg_mtl_cur_height - y;
  6383. }
  6384. w = _sg_max(w, 1);
  6385. h = _sg_max(h, 1);
  6386. MTLScissorRect r;
  6387. r.x = x;
  6388. r.y = origin_top_left ? y : (_sg_mtl_cur_height - (y + h));
  6389. r.width = w;
  6390. r.height = h;
  6391. [_sg_mtl_cmd_encoder setScissorRect:r];
  6392. }
/* apply the complete draw state for subsequent sg_draw() calls: pipeline,
   vertex buffers, index buffer and per-stage images/samplers; redundant
   state changes are filtered through the _sg_mtl_cur_* state cache
   (which is cleared at the start of each pass) */
_SOKOL_PRIVATE void _sg_apply_draw_state(
    _sg_pipeline* pip,
    _sg_buffer** vbs, int num_vbs, _sg_buffer* ib,
    _sg_image** vs_imgs, int num_vs_imgs,
    _sg_image** fs_imgs, int num_fs_imgs)
{
    SOKOL_ASSERT(pip);
    SOKOL_ASSERT(pip->shader);
    SOKOL_ASSERT(_sg_mtl_in_pass);
    if (!_sg_mtl_pass_valid) {
        return;
    }
    SOKOL_ASSERT(_sg_mtl_cmd_encoder);
    /* store index buffer binding, this will be needed later in sg_draw() */
    _sg_mtl_cur_indexbuffer = ib;
    if (ib) {
        SOKOL_ASSERT(pip->index_type != SG_INDEXTYPE_NONE);
        _sg_mtl_cur_indexbuffer_id.id = ib->slot.id;
    }
    else {
        SOKOL_ASSERT(pip->index_type == SG_INDEXTYPE_NONE);
        _sg_mtl_cur_indexbuffer_id.id = SG_INVALID_ID;
    }
    /* apply pipeline state (the id comparison also catches a recycled
       pool slot where the pointer happens to be identical) */
    if ((_sg_mtl_cur_pipeline != pip) || (_sg_mtl_cur_pipeline_id.id != pip->slot.id)) {
        _sg_mtl_cur_pipeline = pip;
        _sg_mtl_cur_pipeline_id.id = pip->slot.id;
        const float* c = pip->blend_color;
        /* FIXME: those should be filtered through a simple state cache */
        [_sg_mtl_cmd_encoder setBlendColorRed:c[0] green:c[1] blue:c[2] alpha:c[3]];
        [_sg_mtl_cmd_encoder setCullMode:pip->mtl_cull_mode];
        [_sg_mtl_cmd_encoder setFrontFacingWinding:pip->mtl_winding];
        [_sg_mtl_cmd_encoder setStencilReferenceValue:pip->mtl_stencil_ref];
        [_sg_mtl_cmd_encoder setDepthBias:pip->depth_bias slopeScale:pip->depth_bias_slope_scale clamp:pip->depth_bias_clamp];
        SOKOL_ASSERT(pip->mtl_rps != _SG_MTL_INVALID_POOL_INDEX);
        [_sg_mtl_cmd_encoder setRenderPipelineState:_sg_mtl_pool[pip->mtl_rps]];
        SOKOL_ASSERT(pip->mtl_dss != _SG_MTL_INVALID_POOL_INDEX);
        [_sg_mtl_cmd_encoder setDepthStencilState:_sg_mtl_pool[pip->mtl_dss]];
    }
    /* apply vertex buffers */
    int slot;
    for (slot = 0; slot < num_vbs; slot++) {
        const _sg_buffer* vb = vbs[slot];
        if ((_sg_mtl_cur_vertexbuffers[slot] != vb) || (_sg_mtl_cur_vertexbuffer_ids[slot].id != vb->slot.id)) {
            _sg_mtl_cur_vertexbuffers[slot] = vb;
            _sg_mtl_cur_vertexbuffer_ids[slot].id = vb->slot.id;
            /* vertex buffers are bound after the uniform-buffer slots */
            const NSUInteger mtl_slot = SG_MAX_SHADERSTAGE_UBS + slot;
            SOKOL_ASSERT(vb->mtl_buf[vb->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
            [_sg_mtl_cmd_encoder setVertexBuffer:_sg_mtl_pool[vb->mtl_buf[vb->active_slot]] offset:0 atIndex:mtl_slot];
        }
    }
    /* apply vertex shader images */
    for (slot = 0; slot < num_vs_imgs; slot++) {
        const _sg_image* img = vs_imgs[slot];
        if ((_sg_mtl_cur_vs_images[slot] != img) || (_sg_mtl_cur_vs_image_ids[slot].id != img->slot.id)) {
            _sg_mtl_cur_vs_images[slot] = img;
            _sg_mtl_cur_vs_image_ids[slot].id = img->slot.id;
            SOKOL_ASSERT(img->mtl_tex[img->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
            [_sg_mtl_cmd_encoder setVertexTexture:_sg_mtl_pool[img->mtl_tex[img->active_slot]] atIndex:slot];
            SOKOL_ASSERT(img->mtl_sampler_state != _SG_MTL_INVALID_POOL_INDEX);
            [_sg_mtl_cmd_encoder setVertexSamplerState:_sg_mtl_pool[img->mtl_sampler_state] atIndex:slot];
        }
    }
    /* apply fragment shader images */
    for (slot = 0; slot < num_fs_imgs; slot++) {
        const _sg_image* img = fs_imgs[slot];
        if ((_sg_mtl_cur_fs_images[slot] != img) || (_sg_mtl_cur_fs_image_ids[slot].id != img->slot.id)) {
            _sg_mtl_cur_fs_images[slot] = img;
            _sg_mtl_cur_fs_image_ids[slot].id = img->slot.id;
            SOKOL_ASSERT(img->mtl_tex[img->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
            [_sg_mtl_cmd_encoder setFragmentTexture:_sg_mtl_pool[img->mtl_tex[img->active_slot]] atIndex:slot];
            SOKOL_ASSERT(img->mtl_sampler_state != _SG_MTL_INVALID_POOL_INDEX);
            [_sg_mtl_cmd_encoder setFragmentSamplerState:_sg_mtl_pool[img->mtl_sampler_state] atIndex:slot];
        }
    }
}
/* round 'val' up to the next multiple of 'round_to'; the bit-mask trick requires round_to to be a power of two */
#define _sg_mtl_roundup(val, round_to) (((val)+((round_to)-1))&~((round_to)-1))
/* Copy a uniform data block into the per-frame global uniform buffer and
   record its byte offset on the Metal render command encoder.

   stage_index: SG_SHADERSTAGE_VS or SG_SHADERSTAGE_FS
   ub_index:    uniform-block bind slot on that stage
   data:        pointer to the uniform data to copy
   num_bytes:   size of the data (must not exceed the declared block size)

   NOTE(review): only the buffer *offset* is set here; this assumes the
   uniform buffer itself was bound to the low buffer slots earlier in the
   pass -- confirm against the pass/draw-state setup code.
*/
_SOKOL_PRIVATE void _sg_apply_uniform_block(sg_shader_stage stage_index, int ub_index, const void* data, int num_bytes) {
    SOKOL_ASSERT(_sg_mtl_in_pass);
    /* silently ignore the call if the current pass could not be set up */
    if (!_sg_mtl_pass_valid) {
        return;
    }
    SOKOL_ASSERT(_sg_mtl_cmd_encoder);
    SOKOL_ASSERT(data && (num_bytes > 0));
    SOKOL_ASSERT((stage_index >= 0) && ((int)stage_index < SG_NUM_SHADER_STAGES));
    SOKOL_ASSERT((ub_index >= 0) && (ub_index < SG_MAX_SHADERSTAGE_UBS));
    /* the write must fit into the global uniform buffer, and the current
       offset must still be properly aligned */
    SOKOL_ASSERT((_sg_mtl_cur_ub_offset + num_bytes) <= _sg_mtl_ub_size);
    SOKOL_ASSERT((_sg_mtl_cur_ub_offset & (_SG_MTL_UB_ALIGN-1)) == 0);
    SOKOL_ASSERT(_sg_mtl_cur_pipeline && _sg_mtl_cur_pipeline->shader);
    SOKOL_ASSERT(_sg_mtl_cur_pipeline->slot.id == _sg_mtl_cur_pipeline_id.id);
    SOKOL_ASSERT(_sg_mtl_cur_pipeline->shader->slot.id == _sg_mtl_cur_pipeline->shader_id.id);
    SOKOL_ASSERT(ub_index < _sg_mtl_cur_pipeline->shader->stage[stage_index].num_uniform_blocks);
    SOKOL_ASSERT(num_bytes <= _sg_mtl_cur_pipeline->shader->stage[stage_index].uniform_blocks[ub_index].size);
    /* copy to global uniform buffer, record offset into cmd encoder, and advance offset */
    uint8_t* dst = &_sg_mtl_cur_ub_base_ptr[_sg_mtl_cur_ub_offset];
    memcpy(dst, data, num_bytes);
    if (stage_index == SG_SHADERSTAGE_VS) {
        [_sg_mtl_cmd_encoder setVertexBufferOffset:_sg_mtl_cur_ub_offset atIndex:ub_index];
    }
    else {
        [_sg_mtl_cmd_encoder setFragmentBufferOffset:_sg_mtl_cur_ub_offset atIndex:ub_index];
    }
    /* keep the running offset aligned for the next uniform update */
    _sg_mtl_cur_ub_offset = _sg_mtl_roundup(_sg_mtl_cur_ub_offset + num_bytes, _SG_MTL_UB_ALIGN);
}
/* Record a draw call on the Metal command encoder.

   base_element:  first index (indexed rendering) or first vertex (non-indexed)
   num_elements:  number of indices or vertices to draw
   num_instances: number of instances

   Uses the currently applied pipeline's primitive type; indexed vs
   non-indexed is decided by the pipeline's index type.
*/
_SOKOL_PRIVATE void _sg_draw(int base_element, int num_elements, int num_instances) {
    SOKOL_ASSERT(_sg_mtl_in_pass);
    /* silently ignore the call if the current pass could not be set up */
    if (!_sg_mtl_pass_valid) {
        return;
    }
    SOKOL_ASSERT(_sg_mtl_cmd_encoder);
    SOKOL_ASSERT(_sg_mtl_cur_pipeline && (_sg_mtl_cur_pipeline->slot.id == _sg_mtl_cur_pipeline_id.id));
    if (SG_INDEXTYPE_NONE != _sg_mtl_cur_pipeline->index_type) {
        /* indexed rendering */
        SOKOL_ASSERT(_sg_mtl_cur_indexbuffer && (_sg_mtl_cur_indexbuffer->slot.id == _sg_mtl_cur_indexbuffer_id.id));
        const _sg_buffer* ib = _sg_mtl_cur_indexbuffer;
        SOKOL_ASSERT(ib->mtl_buf[ib->active_slot] != _SG_MTL_INVALID_POOL_INDEX);
        /* byte offset into the index buffer: base element times index size (2 or 4 bytes) */
        const NSUInteger index_buffer_offset = base_element * _sg_mtl_cur_pipeline->mtl_index_size;
        [_sg_mtl_cmd_encoder drawIndexedPrimitives:_sg_mtl_cur_pipeline->mtl_prim_type
            indexCount:num_elements
            indexType:_sg_mtl_cur_pipeline->mtl_index_type
            indexBuffer:_sg_mtl_pool[ib->mtl_buf[ib->active_slot]]
            indexBufferOffset:index_buffer_offset
            instanceCount:num_instances];
    }
    else {
        /* non-indexed rendering */
        [_sg_mtl_cmd_encoder drawPrimitives:_sg_mtl_cur_pipeline->mtl_prim_type
            vertexStart:base_element
            vertexCount:num_elements
            instanceCount:num_instances];
    }
}
/* Overwrite a dynamic/stream buffer's content with new data.

   Rotates to the next internal buffer slot first (round-robin
   multi-buffering, so the CPU never writes a buffer the GPU may still
   be reading), then memcpy's the data into the MTLBuffer's storage.
   NOTE(review): the macOS-only didModifyRange call suggests the buffer
   uses managed storage mode on macOS -- confirm at buffer creation.
*/
_SOKOL_PRIVATE void _sg_update_buffer(_sg_buffer* buf, const void* data, int data_size) {
    SOKOL_ASSERT(buf && data && (data_size > 0));
    if (++buf->active_slot >= buf->num_slots) {
        buf->active_slot = 0;
    }
    __unsafe_unretained id<MTLBuffer> mtl_buf = _sg_mtl_pool[buf->mtl_buf[buf->active_slot]];
    void* dst_ptr = [mtl_buf contents];
    memcpy(dst_ptr, data, data_size);
    #if defined(SOKOL_METAL_MACOS)
    /* notify Metal that the CPU modified this byte range */
    [mtl_buf didModifyRange:NSMakeRange(0, data_size)];
    #endif
}
/* Overwrite a dynamic/stream image's content with new data.

   Rotates to the next internal texture slot (round-robin multi-buffering,
   same rationale as _sg_update_buffer) and delegates the actual per-face/
   per-mip upload to _sg_mtl_copy_image_content.
*/
_SOKOL_PRIVATE void _sg_update_image(_sg_image* img, const sg_image_content* data) {
    SOKOL_ASSERT(img && data);
    if (++img->active_slot >= img->num_slots) {
        img->active_slot = 0;
    }
    __unsafe_unretained id<MTLTexture> mtl_tex = _sg_mtl_pool[img->mtl_tex[img->active_slot]];
    _sg_mtl_copy_image_content(img, mtl_tex, data);
}
/* invalidate the backend's redundant-state-change cache, forcing all
   state to be re-applied on the next draw-state application */
_SOKOL_PRIVATE void _sg_reset_state_cache() {
    _sg_mtl_clear_state_cache();
}
  6548. #ifdef __cplusplus
  6549. } /* extern "C" */
  6550. #endif
  6551. #else
  6552. #error "No rendering backend selected"
  6553. #endif
  6554. #ifdef __cplusplus
  6555. extern "C" {
  6556. #endif
  6557. /*== RESOURCE POOLS ==========================================================*/
/* fixed-size resource pool bookkeeping (one instance per resource type);
   free slot indices are kept on a LIFO stack */
typedef struct {
    int size;                   /* total number of slots, including reserved slot 0 */
    uint32_t unique_counter;    /* bumped per allocation; forms the upper bits of each id */
    int queue_top;              /* number of entries currently on the free stack */
    int* free_queue;            /* LIFO stack of free slot indices (never holds slot 0) */
} _sg_pool;
  6564. _SOKOL_PRIVATE void _sg_init_pool(_sg_pool* pool, int num) {
  6565. SOKOL_ASSERT(pool && (num > 1));
  6566. /* slot 0 is reserved for the 'invalid id', so bump the pool size by 1 */
  6567. pool->size = num + 1;
  6568. pool->queue_top = 0;
  6569. pool->unique_counter = 0;
  6570. /* it's not a bug to only reserve 'num' here */
  6571. pool->free_queue = (int*) SOKOL_MALLOC(sizeof(int)*num);
  6572. SOKOL_ASSERT(pool->free_queue);
  6573. /* never allocate the zero-th pool item since the invalid id is 0 */
  6574. for (int i = pool->size-1; i >= 1; i--) {
  6575. pool->free_queue[pool->queue_top++] = i;
  6576. }
  6577. }
  6578. _SOKOL_PRIVATE void _sg_discard_pool(_sg_pool* pool) {
  6579. SOKOL_ASSERT(pool);
  6580. SOKOL_FREE(pool->free_queue);
  6581. pool->free_queue = 0;
  6582. pool->size = 0;
  6583. pool->queue_top = 0;
  6584. pool->unique_counter = 0;
  6585. }
  6586. _SOKOL_PRIVATE uint32_t _sg_pool_alloc_id(_sg_pool* pool) {
  6587. SOKOL_ASSERT(pool);
  6588. SOKOL_ASSERT(pool->free_queue);
  6589. if (pool->queue_top > 0) {
  6590. int slot_index = pool->free_queue[--pool->queue_top];
  6591. return ((pool->unique_counter++)<<_SG_SLOT_SHIFT)|slot_index;
  6592. }
  6593. else {
  6594. /* pool exhausted */
  6595. return SG_INVALID_ID;
  6596. }
  6597. }
  6598. _SOKOL_PRIVATE void _sg_pool_free_id(_sg_pool* pool, uint32_t id) {
  6599. SOKOL_ASSERT(id != SG_INVALID_ID);
  6600. SOKOL_ASSERT(pool);
  6601. SOKOL_ASSERT(pool->free_queue);
  6602. SOKOL_ASSERT(pool->queue_top < pool->size);
  6603. #ifdef SOKOL_DEBUG
  6604. /* debug check against double-free */
  6605. int slot_index = _sg_slot_index(id);
  6606. for (int i = 0; i < pool->queue_top; i++) {
  6607. SOKOL_ASSERT(pool->free_queue[i] != slot_index);
  6608. }
  6609. #endif
  6610. pool->free_queue[pool->queue_top++] = id;
  6611. SOKOL_ASSERT(pool->queue_top <= (pool->size-1));
  6612. }
/* all resource pools plus the backing item arrays; pool N's free list
   indexes into the matching item array */
typedef struct {
    _sg_pool buffer_pool;
    _sg_pool image_pool;
    _sg_pool shader_pool;
    _sg_pool pipeline_pool;
    _sg_pool pass_pool;
    _sg_buffer* buffers;        /* array of buffer_pool.size items */
    _sg_image* images;          /* array of image_pool.size items */
    _sg_shader* shaders;        /* array of shader_pool.size items */
    _sg_pipeline* pipelines;    /* array of pipeline_pool.size items */
    _sg_pass* passes;           /* array of pass_pool.size items */
} _sg_pools;
  6625. _SOKOL_PRIVATE void _sg_setup_pools(_sg_pools* p, const sg_desc* desc) {
  6626. SOKOL_ASSERT(p);
  6627. SOKOL_ASSERT(desc);
  6628. /* note: the pools here will have an additional item, since slot 0 is reserved */
  6629. SOKOL_ASSERT((desc->buffer_pool_size >= 0) && (desc->buffer_pool_size < _SG_MAX_POOL_SIZE));
  6630. _sg_init_pool(&p->buffer_pool, _sg_def(desc->buffer_pool_size, _SG_DEFAULT_BUFFER_POOL_SIZE));
  6631. p->buffers = (_sg_buffer*) SOKOL_MALLOC(sizeof(_sg_buffer) * p->buffer_pool.size);
  6632. SOKOL_ASSERT(p->buffers);
  6633. for (int i = 0; i < p->buffer_pool.size; i++) {
  6634. _sg_init_buffer(&p->buffers[i]);
  6635. }
  6636. SOKOL_ASSERT((desc->image_pool_size >= 0) && (desc->image_pool_size < _SG_MAX_POOL_SIZE));
  6637. _sg_init_pool(&p->image_pool, _sg_def(desc->image_pool_size, _SG_DEFAULT_IMAGE_POOL_SIZE));
  6638. p->images = (_sg_image*) SOKOL_MALLOC(sizeof(_sg_image) * p->image_pool.size);
  6639. SOKOL_ASSERT(p->images);
  6640. for (int i = 0; i < p->image_pool.size; i++) {
  6641. _sg_init_image(&p->images[i]);
  6642. }
  6643. SOKOL_ASSERT((desc->shader_pool_size >= 0) && (desc->shader_pool_size < _SG_MAX_POOL_SIZE));
  6644. _sg_init_pool(&p->shader_pool, _sg_def(desc->shader_pool_size, _SG_DEFAULT_SHADER_POOL_SIZE));
  6645. p->shaders = (_sg_shader*) SOKOL_MALLOC(sizeof(_sg_shader) * p->shader_pool.size);
  6646. SOKOL_ASSERT(p->shaders);
  6647. for (int i = 0; i < p->shader_pool.size; i++) {
  6648. _sg_init_shader(&p->shaders[i]);
  6649. }
  6650. SOKOL_ASSERT((desc->pipeline_pool_size >= 0) && (desc->pipeline_pool_size < _SG_MAX_POOL_SIZE));
  6651. _sg_init_pool(&p->pipeline_pool, _sg_def(desc->pipeline_pool_size, _SG_DEFAULT_PIPELINE_POOL_SIZE));
  6652. p->pipelines = (_sg_pipeline*) SOKOL_MALLOC(sizeof(_sg_pipeline) * p->pipeline_pool.size);
  6653. SOKOL_ASSERT(p->pipelines);
  6654. for (int i = 0; i < p->pipeline_pool.size; i++) {
  6655. _sg_init_pipeline(&p->pipelines[i]);
  6656. }
  6657. SOKOL_ASSERT((desc->pass_pool_size >= 0) && (desc->pass_pool_size < _SG_MAX_POOL_SIZE));
  6658. _sg_init_pool(&p->pass_pool, _sg_def(desc->pass_pool_size, _SG_DEFAULT_PASS_POOL_SIZE));
  6659. p->passes = (_sg_pass*) SOKOL_MALLOC(sizeof(_sg_pass) * p->pass_pool.size);
  6660. SOKOL_ASSERT(p->passes);
  6661. for (int i = 0; i < p->pass_pool.size; i++) {
  6662. _sg_init_pass(&p->passes[i]);
  6663. }
  6664. }
  6665. _SOKOL_PRIVATE void _sg_discard_pools(_sg_pools* p) {
  6666. SOKOL_ASSERT(p);
  6667. SOKOL_FREE(p->passes); p->passes = 0;
  6668. SOKOL_FREE(p->pipelines); p->pipelines = 0;
  6669. SOKOL_FREE(p->shaders); p->shaders = 0;
  6670. SOKOL_FREE(p->images); p->images = 0;
  6671. SOKOL_FREE(p->buffers); p->buffers = 0;
  6672. _sg_discard_pool(&p->pass_pool);
  6673. _sg_discard_pool(&p->pipeline_pool);
  6674. _sg_discard_pool(&p->shader_pool);
  6675. _sg_discard_pool(&p->image_pool);
  6676. _sg_discard_pool(&p->buffer_pool);
  6677. }
  6678. /* returns pointer to resource by id without matching id check */
  6679. _SOKOL_PRIVATE _sg_buffer* _sg_buffer_at(const _sg_pools* p, uint32_t buf_id) {
  6680. SOKOL_ASSERT(p && SG_INVALID_ID != buf_id);
  6681. int slot_index = _sg_slot_index(buf_id);
  6682. SOKOL_ASSERT((slot_index >= 0) && (slot_index < p->buffer_pool.size));
  6683. return &p->buffers[slot_index];
  6684. }
  6685. _SOKOL_PRIVATE _sg_image* _sg_image_at(const _sg_pools* p, uint32_t img_id) {
  6686. SOKOL_ASSERT(p && SG_INVALID_ID != img_id);
  6687. int slot_index = _sg_slot_index(img_id);
  6688. SOKOL_ASSERT((slot_index >= 0) && (slot_index < p->image_pool.size));
  6689. return &p->images[slot_index];
  6690. }
  6691. _SOKOL_PRIVATE _sg_shader* _sg_shader_at(const _sg_pools* p, uint32_t shd_id) {
  6692. SOKOL_ASSERT(p && SG_INVALID_ID != shd_id);
  6693. int slot_index = _sg_slot_index(shd_id);
  6694. SOKOL_ASSERT((slot_index >= 0) && (slot_index < p->shader_pool.size));
  6695. return &p->shaders[slot_index];
  6696. }
  6697. _SOKOL_PRIVATE _sg_pipeline* _sg_pipeline_at(const _sg_pools* p, uint32_t pip_id) {
  6698. SOKOL_ASSERT(p && SG_INVALID_ID != pip_id);
  6699. int slot_index = _sg_slot_index(pip_id);
  6700. SOKOL_ASSERT((slot_index >= 0) && (slot_index < p->pipeline_pool.size));
  6701. return &p->pipelines[slot_index];
  6702. }
  6703. _SOKOL_PRIVATE _sg_pass* _sg_pass_at(const _sg_pools* p, uint32_t pass_id) {
  6704. SOKOL_ASSERT(p && SG_INVALID_ID != pass_id);
  6705. int slot_index = _sg_slot_index(pass_id);
  6706. SOKOL_ASSERT((slot_index >= 0) && (slot_index < p->pass_pool.size));
  6707. return &p->passes[slot_index];
  6708. }
  6709. /* returns pointer to resource with matching id check, may return 0 */
  6710. _SOKOL_PRIVATE _sg_buffer* _sg_lookup_buffer(const _sg_pools* p, uint32_t buf_id) {
  6711. if (SG_INVALID_ID != buf_id) {
  6712. _sg_buffer* buf = _sg_buffer_at(p, buf_id);
  6713. if (buf->slot.id == buf_id) {
  6714. return buf;
  6715. }
  6716. }
  6717. return 0;
  6718. }
  6719. _SOKOL_PRIVATE _sg_image* _sg_lookup_image(const _sg_pools* p, uint32_t img_id) {
  6720. if (SG_INVALID_ID != img_id) {
  6721. _sg_image* img = _sg_image_at(p, img_id);
  6722. if (img->slot.id == img_id) {
  6723. return img;
  6724. }
  6725. }
  6726. return 0;
  6727. }
  6728. _SOKOL_PRIVATE _sg_shader* _sg_lookup_shader(const _sg_pools* p, uint32_t shd_id) {
  6729. SOKOL_ASSERT(p);
  6730. if (SG_INVALID_ID != shd_id) {
  6731. _sg_shader* shd = _sg_shader_at(p, shd_id);
  6732. if (shd->slot.id == shd_id) {
  6733. return shd;
  6734. }
  6735. }
  6736. return 0;
  6737. }
  6738. _SOKOL_PRIVATE _sg_pipeline* _sg_lookup_pipeline(const _sg_pools* p, uint32_t pip_id) {
  6739. SOKOL_ASSERT(p);
  6740. if (SG_INVALID_ID != pip_id) {
  6741. _sg_pipeline* pip = _sg_pipeline_at(p, pip_id);
  6742. if (pip->slot.id == pip_id) {
  6743. return pip;
  6744. }
  6745. }
  6746. return 0;
  6747. }
  6748. _SOKOL_PRIVATE _sg_pass* _sg_lookup_pass(const _sg_pools* p, uint32_t pass_id) {
  6749. SOKOL_ASSERT(p);
  6750. if (SG_INVALID_ID != pass_id) {
  6751. _sg_pass* pass = _sg_pass_at(p, pass_id);
  6752. if (pass->slot.id == pass_id) {
  6753. return pass;
  6754. }
  6755. }
  6756. return 0;
  6757. }
  6758. _SOKOL_PRIVATE void _sg_destroy_all_resources(_sg_pools* p) {
  6759. /* this is a bit dumb since it loops over all pool slots to
  6760. find the occupied slots, on the other hand it is only ever
  6761. executed at shutdown
  6762. */
  6763. for (int i = 0; i < p->buffer_pool.size; i++) {
  6764. if (p->buffers[i].slot.state == SG_RESOURCESTATE_VALID) {
  6765. _sg_destroy_buffer(&p->buffers[i]);
  6766. }
  6767. }
  6768. for (int i = 0; i < p->image_pool.size; i++) {
  6769. if (p->images[i].slot.state == SG_RESOURCESTATE_VALID) {
  6770. _sg_destroy_image(&p->images[i]);
  6771. }
  6772. }
  6773. for (int i = 0; i < p->shader_pool.size; i++) {
  6774. if (p->shaders[i].slot.state == SG_RESOURCESTATE_VALID) {
  6775. _sg_destroy_shader(&p->shaders[i]);
  6776. }
  6777. }
  6778. for (int i = 0; i < p->pipeline_pool.size; i++) {
  6779. if (p->pipelines[i].slot.state == SG_RESOURCESTATE_VALID) {
  6780. _sg_destroy_pipeline(&p->pipelines[i]);
  6781. }
  6782. }
  6783. for (int i = 0; i < p->pass_pool.size; i++) {
  6784. if (p->passes[i].slot.state == SG_RESOURCESTATE_VALID) {
  6785. _sg_destroy_pass(&p->passes[i]);
  6786. }
  6787. }
  6788. }
  6789. /*== VALIDATION LAYER ========================================================*/
  6790. #if defined(SOKOL_DEBUG)
/* all error codes the validation layer can report; _SG_VALIDATE_SUCCESS
   (the first entry, value 0) doubles as the 'no error' state, and
   _sg_validate_string() maps each code to a human-readable message */
typedef enum {
    /* special case 'validation was successful' */
    _SG_VALIDATE_SUCCESS,
    /* buffer creation */
    _SG_VALIDATE_BUFFERDESC_CANARY,
    _SG_VALIDATE_BUFFERDESC_SIZE,
    _SG_VALIDATE_BUFFERDESC_CONTENT,
    _SG_VALIDATE_BUFFERDESC_NO_CONTENT,
    /* image creation */
    _SG_VALIDATE_IMAGEDESC_CANARY,
    _SG_VALIDATE_IMAGEDESC_WIDTH,
    _SG_VALIDATE_IMAGEDESC_HEIGHT,
    _SG_VALIDATE_IMAGEDESC_RT_PIXELFORMAT,
    _SG_VALIDATE_IMAGEDESC_NONRT_PIXELFORMAT,
    _SG_VALIDATE_IMAGEDESC_MSAA_BUT_NO_RT,
    _SG_VALIDATE_IMAGEDESC_NO_MSAA_RT_SUPPORT,
    _SG_VALIDATE_IMAGEDESC_RT_IMMUTABLE,
    _SG_VALIDATE_IMAGEDESC_RT_NO_CONTENT,
    _SG_VALIDATE_IMAGEDESC_CONTENT,
    _SG_VALIDATE_IMAGEDESC_NO_CONTENT,
    /* shader creation */
    _SG_VALIDATE_SHADERDESC_CANARY,
    _SG_VALIDATE_SHADERDESC_SOURCE,
    _SG_VALIDATE_SHADERDESC_BYTECODE,
    _SG_VALIDATE_SHADERDESC_SOURCE_OR_BYTECODE,
    _SG_VALIDATE_SHADERDESC_NO_BYTECODE_SIZE,
    _SG_VALIDATE_SHADERDESC_NO_CONT_UBS,
    _SG_VALIDATE_SHADERDESC_NO_CONT_IMGS,
    _SG_VALIDATE_SHADERDESC_NO_CONT_UB_MEMBERS,
    _SG_VALIDATE_SHADERDESC_NO_UB_MEMBERS,
    _SG_VALIDATE_SHADERDESC_UB_MEMBER_NAME,
    _SG_VALIDATE_SHADERDESC_UB_SIZE_MISMATCH,
    _SG_VALIDATE_SHADERDESC_IMG_NAME,
    /* pipeline creation */
    _SG_VALIDATE_PIPELINEDESC_CANARY,
    _SG_VALIDATE_PIPELINEDESC_SHADER,
    _SG_VALIDATE_PIPELINEDESC_NO_ATTRS,
    _SG_VALIDATE_PIPELINEDESC_LAYOUT_STRIDE4,
    _SG_VALIDATE_PIPELINEDESC_ATTR_NAME,
    _SG_VALIDATE_PIPELINEDESC_ATTR_SEMANTICS,
    /* pass creation */
    _SG_VALIDATE_PASSDESC_CANARY,
    _SG_VALIDATE_PASSDESC_NO_COLOR_ATTS,
    _SG_VALIDATE_PASSDESC_NO_CONT_COLOR_ATTS,
    _SG_VALIDATE_PASSDESC_IMAGE,
    _SG_VALIDATE_PASSDESC_MIPLEVEL,
    _SG_VALIDATE_PASSDESC_FACE,
    _SG_VALIDATE_PASSDESC_LAYER,
    _SG_VALIDATE_PASSDESC_SLICE,
    _SG_VALIDATE_PASSDESC_IMAGE_NO_RT,
    _SG_VALIDATE_PASSDESC_COLOR_PIXELFORMATS,
    _SG_VALIDATE_PASSDESC_COLOR_INV_PIXELFORMAT,
    _SG_VALIDATE_PASSDESC_DEPTH_INV_PIXELFORMAT,
    _SG_VALIDATE_PASSDESC_IMAGE_SIZES,
    _SG_VALIDATE_PASSDESC_IMAGE_SAMPLE_COUNTS,
    /* sg_begin_pass validation */
    _SG_VALIDATE_BEGINPASS_PASS,
    _SG_VALIDATE_BEGINPASS_IMAGE,
    /* sg_apply_draw_state validation */
    _SG_VALIDATE_ADS_PIP,
    _SG_VALIDATE_ADS_VBS,
    _SG_VALIDATE_ADS_VB_TYPE,
    _SG_VALIDATE_ADS_NO_IB,
    _SG_VALIDATE_ADS_IB,
    _SG_VALIDATE_ADS_IB_TYPE,
    _SG_VALIDATE_ADS_VS_IMGS,
    _SG_VALIDATE_ADS_VS_IMG_TYPES,
    _SG_VALIDATE_ADS_FS_IMGS,
    _SG_VALIDATE_ADS_FS_IMG_TYPES,
    _SG_VALIDATE_ADS_ATT_COUNT,
    _SG_VALIDATE_ADS_COLOR_FORMAT,
    _SG_VALIDATE_ADS_DEPTH_FORMAT,
    _SG_VALIDATE_ADS_SAMPLE_COUNT,
    /* sg_apply_uniform_block validation */
    _SG_VALIDATE_AUB_NO_PIPELINE,
    _SG_VALIDATE_AUB_NO_UB_AT_SLOT,
    _SG_VALIDATE_AUB_SIZE,
    /* sg_update_buffer validation */
    _SG_VALIDATE_UPDBUF_USAGE,
    _SG_VALIDATE_UPDBUF_SIZE,
    _SG_VALIDATE_UPDBUF_ONCE,
    /* sg_update_image validation */
    _SG_VALIDATE_UPDIMG_USAGE,
    _SG_VALIDATE_UPDIMG_NOTENOUGHDATA,
    _SG_VALIDATE_UPDIMG_SIZEMISMATCH,
    _SG_VALIDATE_UPDIMG_COMPRESSED,
    _SG_VALIDATE_UPDIMG_ONCE
} _sg_validate_error;
  6879. /* return a human readable string for an _sg_validate_error */
  6880. _SOKOL_PRIVATE const char* _sg_validate_string(_sg_validate_error err) {
  6881. switch (err) {
  6882. /* buffer creation validation errors */
  6883. case _SG_VALIDATE_BUFFERDESC_CANARY: return "sg_buffer_desc not initialized";
  6884. case _SG_VALIDATE_BUFFERDESC_SIZE: return "sg_buffer_desc.size cannot be 0";
  6885. case _SG_VALIDATE_BUFFERDESC_CONTENT: return "immutable buffers must be initialized with content (sg_buffer_desc.content)";
  6886. case _SG_VALIDATE_BUFFERDESC_NO_CONTENT: return "dynamic/stream usage buffers cannot be initialized with content";
  6887. /* image creation validation errros */
  6888. case _SG_VALIDATE_IMAGEDESC_CANARY: return "sg_image_desc not initialized";
  6889. case _SG_VALIDATE_IMAGEDESC_WIDTH: return "sg_image_desc.width must be > 0";
  6890. case _SG_VALIDATE_IMAGEDESC_HEIGHT: return "sg_image_desc.height must be > 0";
  6891. case _SG_VALIDATE_IMAGEDESC_RT_PIXELFORMAT: return "invalid pixel format for render-target image";
  6892. case _SG_VALIDATE_IMAGEDESC_NONRT_PIXELFORMAT: return "invalid pixel format for non-render-target image";
  6893. case _SG_VALIDATE_IMAGEDESC_MSAA_BUT_NO_RT: return "non-render-target images cannot be multisampled";
  6894. case _SG_VALIDATE_IMAGEDESC_NO_MSAA_RT_SUPPORT: return "MSAA render targets not supported (SG_FEATURE_MSAA_RENDER_TARGETS)";
  6895. case _SG_VALIDATE_IMAGEDESC_RT_IMMUTABLE: return "render target images must be SG_USAGE_IMMUTABLE";
  6896. case _SG_VALIDATE_IMAGEDESC_RT_NO_CONTENT: return "render target images cannot be initialized with content";
  6897. case _SG_VALIDATE_IMAGEDESC_CONTENT: return "missing or invalid content for immutable image";
  6898. case _SG_VALIDATE_IMAGEDESC_NO_CONTENT: return "dynamic/stream usage images cannot be initialized with content";
  6899. /* shader creation */
  6900. case _SG_VALIDATE_SHADERDESC_CANARY: return "sg_shader_desc not initialized";
  6901. case _SG_VALIDATE_SHADERDESC_SOURCE: return "shader source code required";
  6902. case _SG_VALIDATE_SHADERDESC_BYTECODE: return "shader byte code required";
  6903. case _SG_VALIDATE_SHADERDESC_SOURCE_OR_BYTECODE: return "shader source or byte code required";
  6904. case _SG_VALIDATE_SHADERDESC_NO_BYTECODE_SIZE: return "shader byte code length (in bytes) required";
  6905. case _SG_VALIDATE_SHADERDESC_NO_CONT_UBS: return "shader uniform blocks must occupy continuous slots";
  6906. case _SG_VALIDATE_SHADERDESC_NO_CONT_UB_MEMBERS: return "uniform block members must occupy continuous slots";
  6907. case _SG_VALIDATE_SHADERDESC_NO_UB_MEMBERS: return "GL backend requires uniform block member declarations";
  6908. case _SG_VALIDATE_SHADERDESC_UB_MEMBER_NAME: return "uniform block member name missing";
  6909. case _SG_VALIDATE_SHADERDESC_UB_SIZE_MISMATCH: return "size of uniform block members doesn't match uniform block size";
  6910. case _SG_VALIDATE_SHADERDESC_NO_CONT_IMGS: return "shader images must occupy continuous slots";
  6911. case _SG_VALIDATE_SHADERDESC_IMG_NAME: return "GL backend requires uniform block member names";
  6912. /* pipeline creation */
  6913. case _SG_VALIDATE_PIPELINEDESC_CANARY: return "sg_pipeline_desc not initialized";
  6914. case _SG_VALIDATE_PIPELINEDESC_SHADER: return "sg_pipeline_desc.shader missing or invalid";
  6915. case _SG_VALIDATE_PIPELINEDESC_NO_ATTRS: return "sg_pipeline_desc.layout.attrs is empty or not continuous";
  6916. case _SG_VALIDATE_PIPELINEDESC_LAYOUT_STRIDE4: return "sg_pipeline_desc.layout.buffers[].stride must be multiple of 4";
  6917. case _SG_VALIDATE_PIPELINEDESC_ATTR_NAME: return "GLES2/WebGL vertex layouts must have attribute names";
  6918. case _SG_VALIDATE_PIPELINEDESC_ATTR_SEMANTICS: return "D3D11 vertex layouts must have attribute semantics (sem_name and sem_index)";
  6919. /* pass creation */
  6920. case _SG_VALIDATE_PASSDESC_CANARY: return "sg_pass_desc not initialized";
  6921. case _SG_VALIDATE_PASSDESC_NO_COLOR_ATTS: return "sg_pass_desc.color_attachments[0] must be valid";
  6922. case _SG_VALIDATE_PASSDESC_NO_CONT_COLOR_ATTS: return "color attachments must occupy continuous slots";
  6923. case _SG_VALIDATE_PASSDESC_IMAGE: return "pass attachment image is not valid";
  6924. case _SG_VALIDATE_PASSDESC_MIPLEVEL: return "pass attachment mip level is bigger than image has mipmaps";
  6925. case _SG_VALIDATE_PASSDESC_FACE: return "pass attachment image is cubemap, but face index is too big";
  6926. case _SG_VALIDATE_PASSDESC_LAYER: return "pass attachment image is array texture, but layer index is too big";
  6927. case _SG_VALIDATE_PASSDESC_SLICE: return "pass attachment image is 3d texture, but slice value is too big";
  6928. case _SG_VALIDATE_PASSDESC_IMAGE_NO_RT: return "pass attachment image must be render targets";
  6929. case _SG_VALIDATE_PASSDESC_COLOR_PIXELFORMATS: return "all pass color attachment images must have the same pixel format";
  6930. case _SG_VALIDATE_PASSDESC_COLOR_INV_PIXELFORMAT: return "pass color-attachment images must have a renderable pixel format";
  6931. case _SG_VALIDATE_PASSDESC_DEPTH_INV_PIXELFORMAT: return "pass depth-attachment image must have depth pixel format";
  6932. case _SG_VALIDATE_PASSDESC_IMAGE_SIZES: return "all pass attachments must have the same size";
  6933. case _SG_VALIDATE_PASSDESC_IMAGE_SAMPLE_COUNTS: return "all pass attachments must have the same sample count";
  6934. /* sg_begin_pass */
  6935. case _SG_VALIDATE_BEGINPASS_PASS: return "sg_begin_pass: pass must be valid";
  6936. case _SG_VALIDATE_BEGINPASS_IMAGE: return "sg_begin_pass: one or more attachment images are not valid";
  6937. /* sg_apply_draw_state */
  6938. case _SG_VALIDATE_ADS_PIP: return "sg_apply_draw_state: pipeline object required";
  6939. case _SG_VALIDATE_ADS_VBS: return "sg_apply_draw_state: number of vertex buffers doesn't match number of pipeline vertex layouts";
  6940. case _SG_VALIDATE_ADS_VB_TYPE: return "sg_apply_draw_state: buffer in vertex buffer slot is not a SG_BUFFERTYPE_VERTEXBUFFER";
  6941. case _SG_VALIDATE_ADS_NO_IB: return "sg_apply_draw_state: pipeline object defines indexed rendering, but no index buffer provided";
  6942. case _SG_VALIDATE_ADS_IB: return "sg_apply_draw_state: pipeline object defines non-indexed rendering, but index buffer provided";
  6943. case _SG_VALIDATE_ADS_IB_TYPE: return "sg_apply_draw_state: buffer in index buffer slot is not a SG_BUFFERTYPE_INDEXBUFFER";
  6944. case _SG_VALIDATE_ADS_VS_IMGS: return "sg_apply_draw_state: vertex shader image count doesn't match sg_shader_desc";
  6945. case _SG_VALIDATE_ADS_VS_IMG_TYPES: return "sg_apply_draw_state: one or more vertex shader image types don't match sg_shader_desc";
  6946. case _SG_VALIDATE_ADS_FS_IMGS: return "sg_apply_draw_state: fragment shader image count doesn't match sg_shader_desc";
  6947. case _SG_VALIDATE_ADS_FS_IMG_TYPES: return "sg_apply_draw_state: one or more fragment shader image types don't match sg_shader_desc";
  6948. case _SG_VALIDATE_ADS_ATT_COUNT: return "sg_apply_draw_state: color_attachment_count in pipeline doesn't match number of pass color attachments";
  6949. case _SG_VALIDATE_ADS_COLOR_FORMAT: return "sg_apply_draw_state: color_format in pipeline doesn't match pass color attachment pixel format";
  6950. case _SG_VALIDATE_ADS_DEPTH_FORMAT: return "sg_apply_draw_state: depth_format in pipeline doesn't match pass depth attachment pixel format";
  6951. case _SG_VALIDATE_ADS_SAMPLE_COUNT: return "sg_apply_draw_state: MSAA sample count in pipeline doesn't match render pass attachment sample count";
  6952. /* sg_apply_uniform_block */
  6953. case _SG_VALIDATE_AUB_NO_PIPELINE: return "sg_apply_uniform_block: must be called after sg_apply_draw_state()";
  6954. case _SG_VALIDATE_AUB_NO_UB_AT_SLOT: return "sg_apply_uniform_block: no uniform block declaration at this shader stage UB slot";
  6955. case _SG_VALIDATE_AUB_SIZE: return "sg_apply_uniform_block: data size exceeds declared uniform block size";
  6956. /* sg_update_buffer */
  6957. case _SG_VALIDATE_UPDBUF_USAGE: return "sg_update_buffer: cannot update immutable buffer";
  6958. case _SG_VALIDATE_UPDBUF_SIZE: return "sg_update_buffer: update size is bigger than buffer size";
  6959. case _SG_VALIDATE_UPDBUF_ONCE: return "sg_update_buffer: only one update allowed per buffer and frame";
  6960. /* sg_update_image */
  6961. case _SG_VALIDATE_UPDIMG_USAGE: return "sg_update_image: cannot update immutable image";
  6962. case _SG_VALIDATE_UPDIMG_NOTENOUGHDATA: return "sg_update_image: not enough subimage data provided";
  6963. case _SG_VALIDATE_UPDIMG_SIZEMISMATCH: return "sg_update_image: subimage data size mismatch";
  6964. case _SG_VALIDATE_UPDIMG_COMPRESSED: return "sg_update_image: cannot update images with compressed format";
  6965. case _SG_VALIDATE_UPDIMG_ONCE: return "sg_update_image: only one update allowed per image and frame";
  6966. default: return "unknown validation error";
  6967. }
  6968. }
  6969. #endif /* defined(SOKOL_DEBUG) */
  6970. /*-- generic backend state ---------------------------------------------------*/
/* the single global sokol-gfx state instance */
typedef struct {
    _sg_pools pools;            /* all resource pools and item arrays */
    bool valid;                 /* presumably set between setup and shutdown -- verify against sg_setup()/sg_shutdown() */
    uint32_t frame_index;       /* NOTE(review): looks like a running frame counter -- confirm where it's bumped */
    sg_pass cur_pass;           /* id of the currently active pass */
    sg_pipeline cur_pipeline;   /* id of the currently applied pipeline */
    bool pass_valid;            /* true if the current pass was set up successfully */
    bool next_draw_valid;       /* true if the next draw call may proceed */
    #if defined(SOKOL_DEBUG)
    _sg_validate_error validate_error;  /* last validation error (see _sg_validate_begin/_sg_validate) */
    #endif
} _sg_state;
static _sg_state _sg;
  6984. /*-- validation checks -------------------------------------------------------*/
  6985. #if defined(SOKOL_DEBUG)
/* reset the validation error state before a new validation run */
_SOKOL_PRIVATE void _sg_validate_begin() {
    _sg.validate_error = _SG_VALIDATE_SUCCESS;
}
  6989. _SOKOL_PRIVATE void _sg_validate(bool cond, _sg_validate_error err) {
  6990. if (!cond) {
  6991. _sg.validate_error = err;
  6992. SOKOL_LOG(_sg_validate_string(err));
  6993. }
  6994. }
  6995. _SOKOL_PRIVATE bool _sg_validate_end() {
  6996. if (_sg.validate_error != _SG_VALIDATE_SUCCESS) {
  6997. #if !defined(SOKOL_VALIDATE_NON_FATAL)
  6998. SOKOL_LOG("^^^^ VALIDATION FAILED, TERMINATING ^^^^");
  6999. SOKOL_ASSERT(false);
  7000. #endif
  7001. return false;
  7002. }
  7003. else {
  7004. return true;
  7005. }
  7006. }
  7007. #endif
  7008. _SOKOL_PRIVATE bool _sg_validate_buffer_desc(const sg_buffer_desc* desc) {
  7009. #if !defined(SOKOL_DEBUG)
  7010. return true;
  7011. #else
  7012. SOKOL_ASSERT(desc);
  7013. SOKOL_VALIDATE_BEGIN();
  7014. SOKOL_VALIDATE(desc->_start_canary == 0, _SG_VALIDATE_BUFFERDESC_CANARY);
  7015. SOKOL_VALIDATE(desc->_end_canary == 0, _SG_VALIDATE_BUFFERDESC_CANARY);
  7016. SOKOL_VALIDATE(desc->size > 0, _SG_VALIDATE_BUFFERDESC_SIZE);
  7017. bool ext = (0 != desc->gl_buffers[0]) || (0 != desc->mtl_buffers[0]) || (0 != desc->d3d11_buffer);
  7018. if (!ext && (_sg_def(desc->usage, SG_USAGE_IMMUTABLE) == SG_USAGE_IMMUTABLE)) {
  7019. SOKOL_VALIDATE(0 != desc->content, _SG_VALIDATE_BUFFERDESC_CONTENT);
  7020. }
  7021. else {
  7022. SOKOL_VALIDATE(0 == desc->content, _SG_VALIDATE_BUFFERDESC_NO_CONTENT);
  7023. }
  7024. return SOKOL_VALIDATE_END();
  7025. #endif
  7026. }
/* validate an sg_image_desc; compiles to 'return true' in release builds */
_SOKOL_PRIVATE bool _sg_validate_image_desc(const sg_image_desc* desc) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT(desc);
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(desc->_start_canary == 0, _SG_VALIDATE_IMAGEDESC_CANARY);
    SOKOL_VALIDATE(desc->_end_canary == 0, _SG_VALIDATE_IMAGEDESC_CANARY);
    SOKOL_VALIDATE(desc->width > 0, _SG_VALIDATE_IMAGEDESC_WIDTH);
    SOKOL_VALIDATE(desc->height > 0, _SG_VALIDATE_IMAGEDESC_HEIGHT);
    const sg_pixel_format fmt = _sg_def(desc->pixel_format, SG_PIXELFORMAT_RGBA8);
    const sg_usage usage = _sg_def(desc->usage, SG_USAGE_IMMUTABLE);
    /* 'ext' means the image wraps an externally created native texture */
    const bool ext = (0 != desc->gl_textures[0]) || (0 != desc->mtl_textures[0]) || (0 != desc->d3d11_texture);
    if (desc->render_target) {
        /* render-target images: renderable format, immutable, no initial content */
        if (desc->sample_count > 1) {
            SOKOL_VALIDATE(_sg_query_feature(SG_FEATURE_MSAA_RENDER_TARGETS), _SG_VALIDATE_IMAGEDESC_NO_MSAA_RT_SUPPORT);
        }
        const bool valid_color_fmt = _sg_is_valid_rendertarget_color_format(fmt);
        const bool valid_depth_fmt = _sg_is_valid_rendertarget_depth_format(fmt);
        SOKOL_VALIDATE(valid_color_fmt || valid_depth_fmt, _SG_VALIDATE_IMAGEDESC_RT_PIXELFORMAT);
        SOKOL_VALIDATE(usage == SG_USAGE_IMMUTABLE, _SG_VALIDATE_IMAGEDESC_RT_IMMUTABLE);
        SOKOL_VALIDATE(desc->content.subimage[0][0].ptr==0, _SG_VALIDATE_IMAGEDESC_RT_NO_CONTENT);
    }
    else {
        /* regular images: no MSAA, no depth formats */
        SOKOL_VALIDATE(desc->sample_count <= 1, _SG_VALIDATE_IMAGEDESC_MSAA_BUT_NO_RT);
        const bool valid_nonrt_fmt = !_sg_is_valid_rendertarget_depth_format(fmt);
        SOKOL_VALIDATE(valid_nonrt_fmt, _SG_VALIDATE_IMAGEDESC_NONRT_PIXELFORMAT);
        /* FIXME: should use the same "expected size" computation as in _sg_validate_update_image() here */
        if (!ext && (usage == SG_USAGE_IMMUTABLE)) {
            /* immutable non-injected images must provide data for every face and mip level */
            const int num_faces = _sg_def(desc->type, SG_IMAGETYPE_2D)==SG_IMAGETYPE_CUBE ? 6:1;
            const int num_mips = _sg_def(desc->num_mipmaps, 1);
            for (int face_index = 0; face_index < num_faces; face_index++) {
                for (int mip_index = 0; mip_index < num_mips; mip_index++) {
                    const bool has_data = desc->content.subimage[face_index][mip_index].ptr != 0;
                    const bool has_size = desc->content.subimage[face_index][mip_index].size > 0;
                    SOKOL_VALIDATE(has_data && has_size, _SG_VALIDATE_IMAGEDESC_CONTENT);
                }
            }
        }
        else {
            /* injected or dynamic/stream images must not provide any initial data */
            for (int face_index = 0; face_index < SG_CUBEFACE_NUM; face_index++) {
                for (int mip_index = 0; mip_index < SG_MAX_MIPMAPS; mip_index++) {
                    const bool no_data = 0 == desc->content.subimage[face_index][mip_index].ptr;
                    const bool no_size = 0 == desc->content.subimage[face_index][mip_index].size;
                    SOKOL_VALIDATE(no_data && no_size, _SG_VALIDATE_IMAGEDESC_NO_CONTENT);
                }
            }
        }
    }
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate an sg_shader_desc before shader creation;
   compiled out (always returns true) in release builds */
_SOKOL_PRIVATE bool _sg_validate_shader_desc(const sg_shader_desc* desc) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT(desc);
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(desc->_start_canary == 0, _SG_VALIDATE_SHADERDESC_CANARY);
    SOKOL_VALIDATE(desc->_end_canary == 0, _SG_VALIDATE_SHADERDESC_CANARY);
    #if defined(SOKOL_GLCORE33) || defined(SOKOL_GLES2) || defined(SOKOL_GLES3)
    /* on GL, must provide shader source code */
    SOKOL_VALIDATE(0 != desc->vs.source, _SG_VALIDATE_SHADERDESC_SOURCE);
    SOKOL_VALIDATE(0 != desc->fs.source, _SG_VALIDATE_SHADERDESC_SOURCE);
    #elif defined(SOKOL_METAL_MACOS) || defined(SOKOL_METAL_IOS) || defined(SOKOL_D3D11_SHADER_COMPILER)
    /* on Metal or D3D with shader compiler, must provide shader source code or byte code */
    SOKOL_VALIDATE((0 != desc->vs.source)||(0 != desc->vs.byte_code), _SG_VALIDATE_SHADERDESC_SOURCE_OR_BYTECODE);
    SOKOL_VALIDATE((0 != desc->fs.source)||(0 != desc->fs.byte_code), _SG_VALIDATE_SHADERDESC_SOURCE_OR_BYTECODE);
    #else
    /* on D3D11 without shader compiler, must provide byte code */
    SOKOL_VALIDATE(0 != desc->vs.byte_code, _SG_VALIDATE_SHADERDESC_BYTECODE);
    SOKOL_VALIDATE(0 != desc->fs.byte_code, _SG_VALIDATE_SHADERDESC_BYTECODE);
    #endif
    /* if shader byte code, the size must also be provided */
    if (0 != desc->vs.byte_code) {
        SOKOL_VALIDATE(desc->vs.byte_code_size > 0, _SG_VALIDATE_SHADERDESC_NO_BYTECODE_SIZE);
    }
    if (0 != desc->fs.byte_code) {
        SOKOL_VALIDATE(desc->fs.byte_code_size > 0, _SG_VALIDATE_SHADERDESC_NO_BYTECODE_SIZE);
    }
    /* per-stage checks (stage index 0: vertex shader, 1: fragment shader) */
    for (int stage_index = 0; stage_index < SG_NUM_SHADER_STAGES; stage_index++) {
        const sg_shader_stage_desc* stage_desc = (stage_index == 0)? &desc->vs : &desc->fs;
        /* uniform blocks must occupy continuous slots without gaps */
        bool uniform_blocks_continuous = true;
        for (int ub_index = 0; ub_index < SG_MAX_SHADERSTAGE_UBS; ub_index++) {
            const sg_shader_uniform_block_desc* ub_desc = &stage_desc->uniform_blocks[ub_index];
            if (ub_desc->size > 0) {
                SOKOL_VALIDATE(uniform_blocks_continuous, _SG_VALIDATE_SHADERDESC_NO_CONT_UBS);
                /* uniform block members must also occupy continuous slots */
                bool uniforms_continuous = true;
                int uniform_offset = 0;
                int num_uniforms = 0;
                for (int u_index = 0; u_index < SG_MAX_UB_MEMBERS; u_index++) {
                    const sg_shader_uniform_desc* u_desc = &ub_desc->uniforms[u_index];
                    if (u_desc->type != SG_UNIFORMTYPE_INVALID) {
                        SOKOL_VALIDATE(uniforms_continuous, _SG_VALIDATE_SHADERDESC_NO_CONT_UB_MEMBERS);
                        #if defined(SOKOL_GLES2)
                        /* on GLES2, uniform block members must be named */
                        SOKOL_VALIDATE(u_desc->name, _SG_VALIDATE_SHADERDESC_UB_MEMBER_NAME);
                        #endif
                        const int array_count = _sg_def(u_desc->array_count, 1);
                        uniform_offset += _sg_uniform_size(u_desc->type, array_count);
                        num_uniforms++;
                    }
                    else {
                        uniforms_continuous = false;
                    }
                }
                #if defined(SOKOL_GLCORE33) || defined(SOKOL_GLES2) || defined(SOKOL_GLES3)
                /* on GL backends, accumulated member sizes must match the declared block size */
                SOKOL_VALIDATE(uniform_offset == ub_desc->size, _SG_VALIDATE_SHADERDESC_UB_SIZE_MISMATCH);
                SOKOL_VALIDATE(num_uniforms > 0, _SG_VALIDATE_SHADERDESC_NO_UB_MEMBERS);
                #endif
            }
            else {
                uniform_blocks_continuous = false;
            }
        }
        /* image slots must also be continuous */
        bool images_continuous = true;
        for (int img_index = 0; img_index < SG_MAX_SHADERSTAGE_IMAGES; img_index++) {
            const sg_shader_image_desc* img_desc = &stage_desc->images[img_index];
            if (img_desc->type != _SG_IMAGETYPE_DEFAULT) {
                SOKOL_VALIDATE(images_continuous, _SG_VALIDATE_SHADERDESC_NO_CONT_IMGS);
                #if defined(SOKOL_GLES2)
                /* on GLES2, texture/sampler entries must be named */
                SOKOL_VALIDATE(img_desc->name, _SG_VALIDATE_SHADERDESC_IMG_NAME);
                #endif
            }
            else {
                images_continuous = false;
            }
        }
    }
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate an sg_pipeline_desc before pipeline creation;
   compiled out (always returns true) in release builds */
_SOKOL_PRIVATE bool _sg_validate_pipeline_desc(const sg_pipeline_desc* desc) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT(desc);
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(desc->_start_canary == 0, _SG_VALIDATE_PIPELINEDESC_CANARY);
    SOKOL_VALIDATE(desc->_end_canary == 0, _SG_VALIDATE_PIPELINEDESC_CANARY);
    /* pipeline must reference an existing shader in valid state */
    SOKOL_VALIDATE(desc->shader.id != SG_INVALID_ID, _SG_VALIDATE_PIPELINEDESC_SHADER);
    const _sg_shader* shd = _sg_lookup_shader(&_sg.pools, desc->shader.id);
    SOKOL_VALIDATE(shd && shd->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_PIPELINEDESC_SHADER);
    /* explicitly provided vertex strides must be multiples of 4 (zero means auto-computed) */
    for (int buf_index = 0; buf_index < SG_MAX_SHADERSTAGE_BUFFERS; buf_index++) {
        const sg_buffer_layout_desc* l_desc = &desc->layout.buffers[buf_index];
        if (l_desc->stride == 0) {
            continue;
        }
        SOKOL_VALIDATE((l_desc->stride & 3) == 0, _SG_VALIDATE_PIPELINEDESC_LAYOUT_STRIDE4);
    }
    /* at least one vertex attribute, and attributes must occupy continuous slots */
    SOKOL_VALIDATE(desc->layout.attrs[0].format != SG_VERTEXFORMAT_INVALID, _SG_VALIDATE_PIPELINEDESC_NO_ATTRS);
    bool attrs_cont = true;
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        const sg_vertex_attr_desc* a_desc = &desc->layout.attrs[attr_index];
        if (a_desc->format == SG_VERTEXFORMAT_INVALID) {
            attrs_cont = false;
            continue;
        }
        SOKOL_VALIDATE(attrs_cont, _SG_VALIDATE_PIPELINEDESC_NO_ATTRS);
        SOKOL_ASSERT(a_desc->buffer_index < SG_MAX_SHADERSTAGE_BUFFERS);
        #if defined(SOKOL_GLES2)
        /* on GLES2, vertex attribute names must be provided */
        SOKOL_VALIDATE(a_desc->name, _SG_VALIDATE_PIPELINEDESC_ATTR_NAME);
        #elif defined(SOKOL_D3D11)
        /* on D3D11, semantic names (and semantic indices) must be provided */
        SOKOL_VALIDATE(a_desc->sem_name, _SG_VALIDATE_PIPELINEDESC_ATTR_SEMANTICS);
        #endif
    }
    return SOKOL_VALIDATE_END();
    #endif
}
  7197. _SOKOL_PRIVATE bool _sg_validate_pass_desc(const sg_pass_desc* desc) {
  7198. #if !defined(SOKOL_DEBUG)
  7199. return true;
  7200. #else
  7201. SOKOL_ASSERT(desc);
  7202. SOKOL_VALIDATE_BEGIN();
  7203. SOKOL_VALIDATE(desc->_start_canary == 0, _SG_VALIDATE_PASSDESC_CANARY);
  7204. SOKOL_VALIDATE(desc->_end_canary == 0, _SG_VALIDATE_PASSDESC_CANARY);
  7205. bool atts_cont = true;
  7206. sg_pixel_format color_fmt = SG_PIXELFORMAT_NONE;
  7207. int width = -1, height = -1, sample_count = -1;
  7208. for (int att_index = 0; att_index < SG_MAX_COLOR_ATTACHMENTS; att_index++) {
  7209. const sg_attachment_desc* att = &desc->color_attachments[att_index];
  7210. if (att->image.id == SG_INVALID_ID) {
  7211. SOKOL_VALIDATE(att_index > 0, _SG_VALIDATE_PASSDESC_NO_COLOR_ATTS);
  7212. atts_cont = false;
  7213. continue;
  7214. }
  7215. SOKOL_VALIDATE(atts_cont, _SG_VALIDATE_PASSDESC_NO_CONT_COLOR_ATTS);
  7216. const _sg_image* img = _sg_lookup_image(&_sg.pools, att->image.id);
  7217. SOKOL_VALIDATE(img && img->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_PASSDESC_IMAGE);
  7218. SOKOL_VALIDATE(att->mip_level < img->num_mipmaps, _SG_VALIDATE_PASSDESC_MIPLEVEL);
  7219. if (img->type == SG_IMAGETYPE_CUBE) {
  7220. SOKOL_VALIDATE(att->face < 6, _SG_VALIDATE_PASSDESC_FACE);
  7221. }
  7222. else if (img->type == SG_IMAGETYPE_ARRAY) {
  7223. SOKOL_VALIDATE(att->layer < img->depth, _SG_VALIDATE_PASSDESC_LAYER);
  7224. }
  7225. else if (img->type == SG_IMAGETYPE_3D) {
  7226. SOKOL_VALIDATE(att->slice < img->depth, _SG_VALIDATE_PASSDESC_SLICE);
  7227. }
  7228. SOKOL_VALIDATE(img->render_target, _SG_VALIDATE_PASSDESC_IMAGE_NO_RT);
  7229. if (att_index == 0) {
  7230. color_fmt = img->pixel_format;
  7231. width = img->width >> att->mip_level;
  7232. height = img->height >> att->mip_level;
  7233. sample_count = img->sample_count;
  7234. }
  7235. else {
  7236. SOKOL_VALIDATE(img->pixel_format == color_fmt, _SG_VALIDATE_PASSDESC_COLOR_PIXELFORMATS);
  7237. SOKOL_VALIDATE(width == img->width >> att->mip_level, _SG_VALIDATE_PASSDESC_IMAGE_SIZES);
  7238. SOKOL_VALIDATE(height == img->height >> att->mip_level, _SG_VALIDATE_PASSDESC_IMAGE_SIZES);
  7239. SOKOL_VALIDATE(sample_count == img->sample_count, _SG_VALIDATE_PASSDESC_IMAGE_SAMPLE_COUNTS);
  7240. }
  7241. SOKOL_VALIDATE(_sg_is_valid_rendertarget_color_format(img->pixel_format), _SG_VALIDATE_PASSDESC_COLOR_INV_PIXELFORMAT);
  7242. }
  7243. if (desc->depth_stencil_attachment.image.id != SG_INVALID_ID) {
  7244. const sg_attachment_desc* att = &desc->depth_stencil_attachment;
  7245. const _sg_image* img = _sg_lookup_image(&_sg.pools, att->image.id);
  7246. SOKOL_VALIDATE(img && img->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_PASSDESC_IMAGE);
  7247. SOKOL_VALIDATE(att->mip_level < img->num_mipmaps, _SG_VALIDATE_PASSDESC_MIPLEVEL);
  7248. if (img->type == SG_IMAGETYPE_CUBE) {
  7249. SOKOL_VALIDATE(att->face < 6, _SG_VALIDATE_PASSDESC_FACE);
  7250. }
  7251. else if (img->type == SG_IMAGETYPE_ARRAY) {
  7252. SOKOL_VALIDATE(att->layer < img->depth, _SG_VALIDATE_PASSDESC_LAYER);
  7253. }
  7254. else if (img->type == SG_IMAGETYPE_3D) {
  7255. SOKOL_VALIDATE(att->slice < img->depth, _SG_VALIDATE_PASSDESC_SLICE);
  7256. }
  7257. SOKOL_VALIDATE(img->render_target, _SG_VALIDATE_PASSDESC_IMAGE_NO_RT);
  7258. SOKOL_VALIDATE(width == img->width >> att->mip_level, _SG_VALIDATE_PASSDESC_IMAGE_SIZES);
  7259. SOKOL_VALIDATE(height == img->height >> att->mip_level, _SG_VALIDATE_PASSDESC_IMAGE_SIZES);
  7260. SOKOL_VALIDATE(sample_count == img->sample_count, _SG_VALIDATE_PASSDESC_IMAGE_SAMPLE_COUNTS);
  7261. SOKOL_VALIDATE(_sg_is_valid_rendertarget_depth_format(img->pixel_format), _SG_VALIDATE_PASSDESC_DEPTH_INV_PIXELFORMAT);
  7262. }
  7263. return SOKOL_VALIDATE_END();
  7264. #endif
  7265. }
/* validate that a pass object and its attachment images are still alive
   before sg_begin_pass(); compiled out in release builds */
_SOKOL_PRIVATE bool _sg_validate_begin_pass(_sg_pass* pass) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(pass->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_BEGINPASS_PASS);
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        const _sg_attachment* att = &pass->color_atts[i];
        if (att->image) {
            /* the image must still be valid, and its pool slot id must match
               the id captured at pass creation (detects recycled slots) */
            SOKOL_VALIDATE(att->image->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_BEGINPASS_IMAGE);
            SOKOL_VALIDATE(att->image->slot.id == att->image_id.id, _SG_VALIDATE_BEGINPASS_IMAGE);
        }
    }
    if (pass->ds_att.image) {
        /* same liveness checks for the optional depth-stencil attachment */
        const _sg_attachment* att = &pass->ds_att;
        SOKOL_VALIDATE(att->image->slot.state == SG_RESOURCESTATE_VALID, _SG_VALIDATE_BEGINPASS_IMAGE);
        SOKOL_VALIDATE(att->image->slot.id == att->image_id.id, _SG_VALIDATE_BEGINPASS_IMAGE);
    }
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate the resource bindings of sg_apply_draw_state() against the current
   pipeline and the current pass; compiled out in release builds */
_SOKOL_PRIVATE bool _sg_validate_draw_state(const sg_draw_state* ds) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_VALIDATE_BEGIN();
    /* has pipeline and pipeline still exists */
    SOKOL_VALIDATE(ds->pipeline.id != SG_INVALID_ID, _SG_VALIDATE_ADS_PIP);
    const _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, ds->pipeline.id);
    if (!pip) {
        /* cannot continue with validation without pipeline object */
        return SOKOL_VALIDATE_END();
    }
    SOKOL_ASSERT(pip->shader);
    /* has expected vertex buffers, and vertex buffers still exist */
    for (int i = 0; i < SG_MAX_SHADERSTAGE_BUFFERS; i++) {
        if (ds->vertex_buffers[i].id != SG_INVALID_ID) {
            SOKOL_VALIDATE(pip->vertex_layout_valid[i], _SG_VALIDATE_ADS_VBS);
            /* buffers in vertex-buffer-slots must be of type SG_BUFFERTYPE_VERTEXBUFFER */
            const _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, ds->vertex_buffers[i].id);
            SOKOL_ASSERT(buf);
            if (buf->slot.state == SG_RESOURCESTATE_VALID) {
                SOKOL_VALIDATE(SG_BUFFERTYPE_VERTEXBUFFER == buf->type, _SG_VALIDATE_ADS_VB_TYPE);
            }
        }
        else {
            /* vertex buffer provided in a slot which has no vertex layout in pipeline */
            SOKOL_VALIDATE(!pip->vertex_layout_valid[i], _SG_VALIDATE_ADS_VBS);
        }
    }
    /* index buffer expected or not, and index buffer still exists */
    if (pip->index_type == SG_INDEXTYPE_NONE) {
        /* pipeline defines non-indexed rendering, but index buffer provided */
        SOKOL_VALIDATE(ds->index_buffer.id == SG_INVALID_ID, _SG_VALIDATE_ADS_IB);
    }
    else {
        /* pipeline defines indexed rendering, but no index buffer provided */
        SOKOL_VALIDATE(ds->index_buffer.id != SG_INVALID_ID, _SG_VALIDATE_ADS_NO_IB);
    }
    if (ds->index_buffer.id != SG_INVALID_ID) {
        /* buffer in index-buffer-slot must be of type SG_BUFFERTYPE_INDEXBUFFER */
        const _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, ds->index_buffer.id);
        SOKOL_ASSERT(buf);
        if (buf->slot.state == SG_RESOURCESTATE_VALID) {
            SOKOL_VALIDATE(SG_BUFFERTYPE_INDEXBUFFER == buf->type, _SG_VALIDATE_ADS_IB_TYPE);
        }
    }
    /* has expected vertex shader images */
    for (int i = 0; i < SG_MAX_SHADERSTAGE_IMAGES; i++) {
        _sg_shader_stage* stage = &pip->shader->stage[SG_SHADERSTAGE_VS];
        if (ds->vs_images[i].id != SG_INVALID_ID) {
            SOKOL_VALIDATE(i < stage->num_images, _SG_VALIDATE_ADS_VS_IMGS);
            const _sg_image* img = _sg_lookup_image(&_sg.pools, ds->vs_images[i].id);
            SOKOL_ASSERT(img);
            if (img->slot.state == SG_RESOURCESTATE_VALID) {
                SOKOL_VALIDATE(img->type == stage->images[i].type, _SG_VALIDATE_ADS_VS_IMG_TYPES);
            }
        }
        else {
            SOKOL_VALIDATE(i >= stage->num_images, _SG_VALIDATE_ADS_VS_IMGS);
        }
    }
    /* has expected fragment shader images */
    for (int i = 0; i < SG_MAX_SHADERSTAGE_IMAGES; i++) {
        _sg_shader_stage* stage = &pip->shader->stage[SG_SHADERSTAGE_FS];
        if (ds->fs_images[i].id != SG_INVALID_ID) {
            SOKOL_VALIDATE(i < stage->num_images, _SG_VALIDATE_ADS_FS_IMGS);
            const _sg_image* img = _sg_lookup_image(&_sg.pools, ds->fs_images[i].id);
            SOKOL_ASSERT(img);
            if (img->slot.state == SG_RESOURCESTATE_VALID) {
                SOKOL_VALIDATE(img->type == stage->images[i].type, _SG_VALIDATE_ADS_FS_IMG_TYPES);
            }
        }
        else {
            SOKOL_VALIDATE(i >= stage->num_images, _SG_VALIDATE_ADS_FS_IMGS);
        }
    }
    /* check that pipeline attributes match current pass attributes */
    const _sg_pass* pass = _sg_lookup_pass(&_sg.pools, _sg.cur_pass.id);
    if (pass) {
        /* an offscreen pass */
        /* NOTE(review): assumes color_atts[0].image is non-null for any pass
           in the pool — confirm pass creation guarantees this */
        SOKOL_VALIDATE(pip->color_attachment_count == pass->num_color_atts, _SG_VALIDATE_ADS_ATT_COUNT);
        SOKOL_VALIDATE(pip->color_format == pass->color_atts[0].image->pixel_format, _SG_VALIDATE_ADS_COLOR_FORMAT);
        SOKOL_VALIDATE(pip->sample_count == pass->color_atts[0].image->sample_count, _SG_VALIDATE_ADS_SAMPLE_COUNT);
        if (pass->ds_att.image) {
            SOKOL_VALIDATE(pip->depth_format == pass->ds_att.image->pixel_format, _SG_VALIDATE_ADS_DEPTH_FORMAT);
        }
        else {
            SOKOL_VALIDATE(pip->depth_format == SG_PIXELFORMAT_NONE, _SG_VALIDATE_ADS_DEPTH_FORMAT);
        }
    }
    else {
        /* default pass */
        SOKOL_VALIDATE(pip->color_attachment_count == 1, _SG_VALIDATE_ADS_ATT_COUNT);
        SOKOL_VALIDATE(pip->color_format == SG_PIXELFORMAT_RGBA8, _SG_VALIDATE_ADS_COLOR_FORMAT);
        SOKOL_VALIDATE(pip->depth_format == SG_PIXELFORMAT_DEPTHSTENCIL, _SG_VALIDATE_ADS_DEPTH_FORMAT);
        /* FIXME: hmm, we don't know if the default framebuffer is multisampled here */
    }
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate a uniform-block update against the currently applied pipeline's
   shader: the slot must exist and the data must fit; compiled out in release */
_SOKOL_PRIVATE bool _sg_validate_apply_uniform_block(sg_shader_stage stage_index, int ub_index, const void* data, int num_bytes) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT((stage_index == SG_SHADERSTAGE_VS) || (stage_index == SG_SHADERSTAGE_FS));
    SOKOL_ASSERT((ub_index >= 0) && (ub_index < SG_MAX_SHADERSTAGE_UBS));
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(_sg.cur_pipeline.id != SG_INVALID_ID, _SG_VALIDATE_AUB_NO_PIPELINE);
    const _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, _sg.cur_pipeline.id);
    SOKOL_ASSERT(pip && (pip->slot.id == _sg.cur_pipeline.id));
    SOKOL_ASSERT(pip->shader && (pip->shader->slot.id == pip->shader_id.id));
    /* check that there is a uniform block at 'stage' and 'ub_index' */
    const _sg_shader_stage* stage = &pip->shader->stage[stage_index];
    SOKOL_VALIDATE(ub_index < stage->num_uniform_blocks, _SG_VALIDATE_AUB_NO_UB_AT_SLOT);
    /* check that the provided data size doesn't exceed the uniform block size */
    SOKOL_VALIDATE(num_bytes <= stage->uniform_blocks[ub_index].size, _SG_VALIDATE_AUB_SIZE);
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate a buffer update: the buffer must not be immutable, the data must
   fit, and a buffer may only be updated once per frame; compiled out in release */
_SOKOL_PRIVATE bool _sg_validate_update_buffer(const _sg_buffer* buf, const void* data, int size) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT(buf && data);
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(buf->usage != SG_USAGE_IMMUTABLE, _SG_VALIDATE_UPDBUF_USAGE);
    SOKOL_VALIDATE(buf->size >= size, _SG_VALIDATE_UPDBUF_SIZE);
    /* upd_frame_index equal to the current frame means it was already updated this frame */
    SOKOL_VALIDATE(buf->upd_frame_index != _sg.frame_index, _SG_VALIDATE_UPDBUF_ONCE);
    return SOKOL_VALIDATE_END();
    #endif
}
/* validate an image update: not immutable, not a compressed format, only once
   per frame, and each face/mip slot must provide exactly the expected number
   of bytes; compiled out in release builds */
_SOKOL_PRIVATE bool _sg_validate_update_image(const _sg_image* img, const sg_image_content* data) {
    #if !defined(SOKOL_DEBUG)
    return true;
    #else
    SOKOL_ASSERT(img && data);
    SOKOL_VALIDATE_BEGIN();
    SOKOL_VALIDATE(img->usage != SG_USAGE_IMMUTABLE, _SG_VALIDATE_UPDIMG_USAGE);
    SOKOL_VALIDATE(img->upd_frame_index != _sg.frame_index, _SG_VALIDATE_UPDIMG_ONCE);
    SOKOL_VALIDATE(!_sg_is_compressed_pixel_format(img->pixel_format), _SG_VALIDATE_UPDIMG_COMPRESSED);
    const int num_faces = (img->type == SG_IMAGETYPE_CUBE) ? 6 : 1;
    const int num_mips = img->num_mipmaps;
    for (int face_index = 0; face_index < num_faces; face_index++) {
        for (int mip_index = 0; mip_index < num_mips; mip_index++) {
            SOKOL_VALIDATE(0 != data->subimage[face_index][mip_index].ptr, _SG_VALIDATE_UPDIMG_NOTENOUGHDATA);
            /* mip dimensions halve per level but never drop below 1 */
            const int mip_width = _sg_max(img->width >> mip_index, 1);
            const int mip_height = _sg_max(img->height >> mip_index, 1);
            const int bytes_per_slice = _sg_surface_pitch(img->pixel_format, mip_width, mip_height);
            const int expected_size = bytes_per_slice * img->depth;
            SOKOL_VALIDATE(data->subimage[face_index][mip_index].size == expected_size, _SG_VALIDATE_UPDIMG_SIZEMISMATCH);
        }
    }
    return SOKOL_VALIDATE_END();
    #endif
}
  7442. /*== PUBLIC API FUNCTIONS ====================================================*/
/* setup sokol-gfx; must be called before any other sg_* function;
   the canary asserts guard against a non-zero-initialized desc struct */
void sg_setup(const sg_desc* desc) {
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT((desc->_start_canary == 0) && (desc->_end_canary == 0));
    memset(&_sg, 0, sizeof(_sg));
    _sg_setup_pools(&_sg.pools, desc);
    /* frame_index starts at 1 (presumably so a zeroed upd_frame_index never
       matches the current frame — TODO confirm) */
    _sg.frame_index = 1;
    _sg.next_draw_valid = false;
    _sg_setup_backend(desc);
    _sg.valid = true;
}
/* shutdown sokol-gfx: destroys all remaining resources, the backend,
   and the resource pools; sg_isvalid() returns false afterwards */
void sg_shutdown() {
    _sg_destroy_all_resources(&_sg.pools);
    _sg_discard_backend();
    _sg_discard_pools(&_sg.pools);
    _sg.valid = false;
}
/* true between sg_setup() and sg_shutdown() */
bool sg_isvalid() {
    return _sg.valid;
}
/* query whether the active backend supports an optional feature */
bool sg_query_feature(sg_feature f) {
    return _sg_query_feature(f);
}
  7465. /*-- allocate resource id ----------------------------------------------------*/
  7466. sg_buffer sg_alloc_buffer() {
  7467. sg_buffer res;
  7468. res.id = _sg_pool_alloc_id(&_sg.pools.buffer_pool);
  7469. if (res.id != SG_INVALID_ID) {
  7470. _sg_buffer* buf = _sg_buffer_at(&_sg.pools, res.id);
  7471. SOKOL_ASSERT(buf && (buf->slot.state == SG_RESOURCESTATE_INITIAL) && (buf->slot.id == SG_INVALID_ID));
  7472. buf->slot.id = res.id;
  7473. buf->slot.state = SG_RESOURCESTATE_ALLOC;
  7474. }
  7475. return res;
  7476. }
  7477. sg_image sg_alloc_image() {
  7478. sg_image res;
  7479. res.id = _sg_pool_alloc_id(&_sg.pools.image_pool);
  7480. if (res.id != SG_INVALID_ID) {
  7481. _sg_image* img = _sg_image_at(&_sg.pools, res.id);
  7482. SOKOL_ASSERT(img && (img->slot.state == SG_RESOURCESTATE_INITIAL) && (img->slot.id == SG_INVALID_ID));
  7483. img->slot.id = res.id;
  7484. img->slot.state = SG_RESOURCESTATE_ALLOC;
  7485. }
  7486. return res;
  7487. }
  7488. sg_shader sg_alloc_shader() {
  7489. sg_shader res;
  7490. res.id = _sg_pool_alloc_id(&_sg.pools.shader_pool);
  7491. if (res.id != SG_INVALID_ID) {
  7492. _sg_shader* shd = _sg_shader_at(&_sg.pools, res.id);
  7493. SOKOL_ASSERT(shd && (shd->slot.state == SG_RESOURCESTATE_INITIAL) && (shd->slot.id == SG_INVALID_ID));
  7494. shd->slot.id = res.id;
  7495. shd->slot.state = SG_RESOURCESTATE_ALLOC;
  7496. }
  7497. return res;
  7498. }
  7499. sg_pipeline sg_alloc_pipeline() {
  7500. sg_pipeline res;
  7501. res.id = _sg_pool_alloc_id(&_sg.pools.pipeline_pool);
  7502. if (res.id != SG_INVALID_ID) {
  7503. _sg_pipeline* pip = _sg_pipeline_at(&_sg.pools, res.id);
  7504. SOKOL_ASSERT(pip && (pip->slot.state == SG_RESOURCESTATE_INITIAL) && (pip->slot.id == SG_INVALID_ID));
  7505. pip->slot.id = res.id;
  7506. pip->slot.state = SG_RESOURCESTATE_ALLOC;
  7507. }
  7508. return res;
  7509. }
  7510. sg_pass sg_alloc_pass() {
  7511. sg_pass res;
  7512. res.id = _sg_pool_alloc_id(&_sg.pools.pass_pool);
  7513. if (res.id != SG_INVALID_ID) {
  7514. _sg_pass* pass = _sg_pass_at(&_sg.pools, res.id);
  7515. SOKOL_ASSERT(pass && (pass->slot.state == SG_RESOURCESTATE_INITIAL) && (pass->slot.id == SG_INVALID_ID));
  7516. pass->slot.id = res.id;
  7517. pass->slot.state = SG_RESOURCESTATE_ALLOC;
  7518. }
  7519. return res;
  7520. }
  7521. /*-- initialize an allocated resource ----------------------------------------*/
  7522. void sg_init_buffer(sg_buffer buf_id, const sg_buffer_desc* desc) {
  7523. SOKOL_ASSERT(buf_id.id != SG_INVALID_ID && desc);
  7524. _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, buf_id.id);
  7525. SOKOL_ASSERT(buf && buf->slot.state == SG_RESOURCESTATE_ALLOC);
  7526. if (_sg_validate_buffer_desc(desc)) {
  7527. _sg_create_buffer(buf, desc);
  7528. }
  7529. else {
  7530. buf->slot.state = SG_RESOURCESTATE_FAILED;
  7531. }
  7532. SOKOL_ASSERT((buf->slot.state == SG_RESOURCESTATE_VALID)||(buf->slot.state == SG_RESOURCESTATE_FAILED));
  7533. }
  7534. void sg_init_image(sg_image img_id, const sg_image_desc* desc) {
  7535. SOKOL_ASSERT(img_id.id != SG_INVALID_ID && desc);
  7536. _sg_image* img = _sg_lookup_image(&_sg.pools, img_id.id);
  7537. SOKOL_ASSERT(img && img->slot.state == SG_RESOURCESTATE_ALLOC);
  7538. if (_sg_validate_image_desc(desc)) {
  7539. _sg_create_image(img, desc);
  7540. }
  7541. else {
  7542. img->slot.state = SG_RESOURCESTATE_FAILED;
  7543. }
  7544. SOKOL_ASSERT((img->slot.state == SG_RESOURCESTATE_VALID)||(img->slot.state == SG_RESOURCESTATE_FAILED));
  7545. }
  7546. void sg_init_shader(sg_shader shd_id, const sg_shader_desc* desc) {
  7547. SOKOL_ASSERT(shd_id.id != SG_INVALID_ID && desc);
  7548. _sg_shader* shd = _sg_lookup_shader(&_sg.pools, shd_id.id);
  7549. SOKOL_ASSERT(shd && shd->slot.state == SG_RESOURCESTATE_ALLOC);
  7550. if (_sg_validate_shader_desc(desc)) {
  7551. _sg_create_shader(shd, desc);
  7552. }
  7553. else {
  7554. shd->slot.state = SG_RESOURCESTATE_FAILED;
  7555. }
  7556. SOKOL_ASSERT((shd->slot.state == SG_RESOURCESTATE_VALID)||(shd->slot.state == SG_RESOURCESTATE_FAILED));
  7557. }
  7558. void sg_init_pipeline(sg_pipeline pip_id, const sg_pipeline_desc* desc) {
  7559. SOKOL_ASSERT(pip_id.id != SG_INVALID_ID && desc);
  7560. _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, pip_id.id);
  7561. SOKOL_ASSERT(pip && pip->slot.state == SG_RESOURCESTATE_ALLOC);
  7562. if (_sg_validate_pipeline_desc(desc)) {
  7563. _sg_shader* shd = _sg_lookup_shader(&_sg.pools, desc->shader.id);
  7564. SOKOL_ASSERT(shd && shd->slot.state == SG_RESOURCESTATE_VALID);
  7565. _sg_create_pipeline(pip, shd, desc);
  7566. }
  7567. else {
  7568. pip->slot.state = SG_RESOURCESTATE_FAILED;
  7569. }
  7570. SOKOL_ASSERT((pip->slot.state == SG_RESOURCESTATE_VALID)||(pip->slot.state == SG_RESOURCESTATE_FAILED));
  7571. }
  7572. void sg_init_pass(sg_pass pass_id, const sg_pass_desc* desc) {
  7573. SOKOL_ASSERT(pass_id.id != SG_INVALID_ID && desc);
  7574. _sg_pass* pass = _sg_lookup_pass(&_sg.pools, pass_id.id);
  7575. SOKOL_ASSERT(pass && pass->slot.state == SG_RESOURCESTATE_ALLOC);
  7576. if (_sg_validate_pass_desc(desc)) {
  7577. /* lookup pass attachment image pointers */
  7578. _sg_image* att_imgs[SG_MAX_COLOR_ATTACHMENTS + 1];
  7579. for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
  7580. if (desc->color_attachments[i].image.id) {
  7581. att_imgs[i] = _sg_lookup_image(&_sg.pools, desc->color_attachments[i].image.id);
  7582. SOKOL_ASSERT(att_imgs[i] && att_imgs[i]->slot.state == SG_RESOURCESTATE_VALID);
  7583. }
  7584. else {
  7585. att_imgs[i] = 0;
  7586. }
  7587. }
  7588. const int ds_att_index = SG_MAX_COLOR_ATTACHMENTS;
  7589. if (desc->depth_stencil_attachment.image.id) {
  7590. att_imgs[ds_att_index] = _sg_lookup_image(&_sg.pools, desc->depth_stencil_attachment.image.id);
  7591. SOKOL_ASSERT(att_imgs[ds_att_index] && att_imgs[ds_att_index]->slot.state == SG_RESOURCESTATE_VALID);
  7592. }
  7593. else {
  7594. att_imgs[ds_att_index] = 0;
  7595. }
  7596. _sg_create_pass(pass, att_imgs, desc);
  7597. }
  7598. else {
  7599. pass->slot.state = SG_RESOURCESTATE_FAILED;
  7600. }
  7601. SOKOL_ASSERT((pass->slot.state == SG_RESOURCESTATE_VALID)||(pass->slot.state == SG_RESOURCESTATE_FAILED));
  7602. }
  7603. /*-- set allocated resource to failed state ----------------------------------*/
  7604. void sg_fail_buffer(sg_buffer buf_id) {
  7605. SOKOL_ASSERT(buf_id.id != SG_INVALID_ID);
  7606. _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, buf_id.id);
  7607. SOKOL_ASSERT(buf && buf->slot.state == SG_RESOURCESTATE_ALLOC);
  7608. buf->slot.state = SG_RESOURCESTATE_FAILED;
  7609. }
  7610. void sg_fail_image(sg_image img_id) {
  7611. SOKOL_ASSERT(img_id.id != SG_INVALID_ID);
  7612. _sg_image* img = _sg_lookup_image(&_sg.pools, img_id.id);
  7613. SOKOL_ASSERT(img && img->slot.state == SG_RESOURCESTATE_ALLOC);
  7614. img->slot.state = SG_RESOURCESTATE_FAILED;
  7615. }
  7616. void sg_fail_shader(sg_shader shd_id) {
  7617. SOKOL_ASSERT(shd_id.id != SG_INVALID_ID);
  7618. _sg_shader* shd = _sg_lookup_shader(&_sg.pools, shd_id.id);
  7619. SOKOL_ASSERT(shd && shd->slot.state == SG_RESOURCESTATE_ALLOC);
  7620. shd->slot.state = SG_RESOURCESTATE_FAILED;
  7621. }
  7622. void sg_fail_pipeline(sg_pipeline pip_id) {
  7623. SOKOL_ASSERT(pip_id.id != SG_INVALID_ID);
  7624. _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, pip_id.id);
  7625. SOKOL_ASSERT(pip && pip->slot.state == SG_RESOURCESTATE_ALLOC);
  7626. pip->slot.state = SG_RESOURCESTATE_FAILED;
  7627. }
  7628. void sg_fail_pass(sg_pass pass_id) {
  7629. SOKOL_ASSERT(pass_id.id != SG_INVALID_ID);
  7630. _sg_pass* pass = _sg_lookup_pass(&_sg.pools, pass_id.id);
  7631. SOKOL_ASSERT(pass && pass->slot.state == SG_RESOURCESTATE_ALLOC);
  7632. pass->slot.state = SG_RESOURCESTATE_FAILED;
  7633. }
  7634. /*-- get resource state */
  7635. sg_resource_state sg_query_buffer_state(sg_buffer buf_id) {
  7636. if (buf_id.id != SG_INVALID_ID) {
  7637. _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, buf_id.id);
  7638. if (buf) {
  7639. return buf->slot.state;
  7640. }
  7641. }
  7642. return SG_RESOURCESTATE_INVALID;
  7643. }
  7644. sg_resource_state sg_query_image_state(sg_image img_id) {
  7645. if (img_id.id != SG_INVALID_ID) {
  7646. _sg_image* img = _sg_lookup_image(&_sg.pools, img_id.id);
  7647. if (img) {
  7648. return img->slot.state;
  7649. }
  7650. }
  7651. return SG_RESOURCESTATE_INVALID;
  7652. }
  7653. sg_resource_state sg_query_shader_state(sg_shader shd_id) {
  7654. if (shd_id.id != SG_INVALID_ID) {
  7655. _sg_shader* shd = _sg_lookup_shader(&_sg.pools, shd_id.id);
  7656. if (shd) {
  7657. return shd->slot.state;
  7658. }
  7659. }
  7660. return SG_RESOURCESTATE_INVALID;
  7661. }
  7662. sg_resource_state sg_query_pipeline_state(sg_pipeline pip_id) {
  7663. if (pip_id.id != SG_INVALID_ID) {
  7664. _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, pip_id.id);
  7665. if (pip) {
  7666. return pip->slot.state;
  7667. }
  7668. }
  7669. return SG_RESOURCESTATE_INVALID;
  7670. }
  7671. sg_resource_state sg_query_pass_state(sg_pass pass_id) {
  7672. if (pass_id.id != SG_INVALID_ID) {
  7673. _sg_pass* pass = _sg_lookup_pass(&_sg.pools, pass_id.id);
  7674. if (pass) {
  7675. return pass->slot.state;
  7676. }
  7677. }
  7678. return SG_RESOURCESTATE_INVALID;
  7679. }
  7680. /*-- allocate and initialize resource ----------------------------------------*/
  7681. sg_buffer sg_make_buffer(const sg_buffer_desc* desc) {
  7682. SOKOL_ASSERT(desc);
  7683. sg_buffer buf_id = sg_alloc_buffer();
  7684. if (buf_id.id != SG_INVALID_ID) {
  7685. sg_init_buffer(buf_id, desc);
  7686. }
  7687. else {
  7688. SOKOL_LOG("buffer pool exhausted!");
  7689. }
  7690. return buf_id;
  7691. }
  7692. sg_image sg_make_image(const sg_image_desc* desc) {
  7693. SOKOL_ASSERT(desc);
  7694. sg_image img_id = sg_alloc_image();
  7695. if (img_id.id != SG_INVALID_ID) {
  7696. sg_init_image(img_id, desc);
  7697. }
  7698. else {
  7699. SOKOL_LOG("image pool exhausted!");
  7700. }
  7701. return img_id;
  7702. }
  7703. sg_shader sg_make_shader(const sg_shader_desc* desc) {
  7704. SOKOL_ASSERT(desc);
  7705. sg_shader shd_id = sg_alloc_shader();
  7706. if (shd_id.id != SG_INVALID_ID) {
  7707. sg_init_shader(shd_id, desc);
  7708. }
  7709. else {
  7710. SOKOL_LOG("shader pool exhausted!");
  7711. }
  7712. return shd_id;
  7713. }
  7714. sg_pipeline sg_make_pipeline(const sg_pipeline_desc* desc) {
  7715. SOKOL_ASSERT(desc);
  7716. sg_pipeline pip_id = sg_alloc_pipeline();
  7717. if (pip_id.id != SG_INVALID_ID) {
  7718. sg_init_pipeline(pip_id, desc);
  7719. }
  7720. else {
  7721. SOKOL_LOG("pipeline pool exhausted!");
  7722. }
  7723. return pip_id;
  7724. }
  7725. sg_pass sg_make_pass(const sg_pass_desc* desc) {
  7726. SOKOL_ASSERT(desc);
  7727. sg_pass pass_id = sg_alloc_pass();
  7728. if (pass_id.id != SG_INVALID_ID) {
  7729. sg_init_pass(pass_id, desc);
  7730. }
  7731. else {
  7732. SOKOL_LOG("pass pool exhausted!");
  7733. }
  7734. return pass_id;
  7735. }
  7736. /*-- destroy resource --------------------------------------------------------*/
  7737. void sg_destroy_buffer(sg_buffer buf_id) {
  7738. _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, buf_id.id);
  7739. if (buf) {
  7740. _sg_destroy_buffer(buf);
  7741. _sg_pool_free_id(&_sg.pools.buffer_pool, buf_id.id);
  7742. }
  7743. }
  7744. void sg_destroy_image(sg_image img_id) {
  7745. _sg_image* img = _sg_lookup_image(&_sg.pools, img_id.id);
  7746. if (img) {
  7747. _sg_destroy_image(img);
  7748. _sg_pool_free_id(&_sg.pools.image_pool, img_id.id);
  7749. }
  7750. }
  7751. void sg_destroy_shader(sg_shader shd_id) {
  7752. _sg_shader* shd = _sg_lookup_shader(&_sg.pools, shd_id.id);
  7753. if (shd) {
  7754. _sg_destroy_shader(shd);
  7755. _sg_pool_free_id(&_sg.pools.shader_pool, shd_id.id);
  7756. }
  7757. }
  7758. void sg_destroy_pipeline(sg_pipeline pip_id) {
  7759. _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, pip_id.id);
  7760. if (pip) {
  7761. _sg_destroy_pipeline(pip);
  7762. _sg_pool_free_id(&_sg.pools.pipeline_pool, pip_id.id);
  7763. }
  7764. }
  7765. void sg_destroy_pass(sg_pass pass_id) {
  7766. _sg_pass* pass = _sg_lookup_pass(&_sg.pools, pass_id.id);
  7767. if (pass) {
  7768. _sg_destroy_pass(pass);
  7769. _sg_pool_free_id(&_sg.pools.pass_pool, pass_id.id);
  7770. }
  7771. }
  7772. void sg_begin_default_pass(const sg_pass_action* pass_action, int width, int height) {
  7773. SOKOL_ASSERT(pass_action);
  7774. SOKOL_ASSERT((pass_action->_start_canary == 0) && (pass_action->_end_canary == 0));
  7775. sg_pass_action pa;
  7776. _sg_resolve_default_pass_action(pass_action, &pa);
  7777. _sg.cur_pass.id = SG_INVALID_ID;
  7778. _sg.pass_valid = true;
  7779. _sg_begin_pass(0, &pa, width, height);
  7780. }
  7781. void sg_begin_pass(sg_pass pass_id, const sg_pass_action* pass_action) {
  7782. SOKOL_ASSERT(pass_action);
  7783. SOKOL_ASSERT((pass_action->_start_canary == 0) && (pass_action->_end_canary == 0));
  7784. _sg.cur_pass = pass_id;
  7785. _sg_pass* pass = _sg_lookup_pass(&_sg.pools, pass_id.id);
  7786. if (pass && _sg_validate_begin_pass(pass)) {
  7787. _sg.pass_valid = true;
  7788. sg_pass_action pa;
  7789. _sg_resolve_default_pass_action(pass_action, &pa);
  7790. const int w = pass->color_atts[0].image->width;
  7791. const int h = pass->color_atts[0].image->height;
  7792. _sg_begin_pass(pass, &pa, w, h);
  7793. }
  7794. else {
  7795. _sg.pass_valid = false;
  7796. }
  7797. }
  7798. void sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left) {
  7799. if (!_sg.pass_valid) {
  7800. return;
  7801. }
  7802. _sg_apply_viewport(x, y, width, height, origin_top_left);
  7803. }
  7804. void sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left) {
  7805. if (!_sg.pass_valid) {
  7806. return;
  7807. }
  7808. _sg_apply_scissor_rect(x, y, width, height, origin_top_left);
  7809. }
/* resolve the resource ids in a draw state to pool pointers and apply them
   as the current draw state; a draw state that fails validation, or that
   references resources which aren't (yet) in SG_RESOURCESTATE_VALID,
   clears _sg.next_draw_valid so that following sg_draw calls are dropped
*/
void sg_apply_draw_state(const sg_draw_state* ds) {
    SOKOL_ASSERT(ds);
    SOKOL_ASSERT((ds->_start_canary==0) && (ds->_end_canary==0));
    if (!_sg_validate_draw_state(ds)) {
        _sg.next_draw_valid = false;
        return;
    }
    if (!_sg.pass_valid) {
        return;
    }
    _sg.next_draw_valid = true;
    _sg.cur_pipeline = ds->pipeline;
    /* lookup resource pointers, resources which are not in SG_RESOURCESTATE_VALID
       are not a fatal error, but suppress the following drawcalls, this is to
       allow for simple asynchronous resource setup
    */
    _sg_pipeline* pip = _sg_lookup_pipeline(&_sg.pools, ds->pipeline.id);
    SOKOL_ASSERT(pip);
    _sg.next_draw_valid &= (SG_RESOURCESTATE_VALID == pip->slot.state);
    SOKOL_ASSERT(pip->shader && (pip->shader->slot.id == pip->shader_id.id));
    /* NOTE: the num_* counters live in the for-increment clause on purpose:
       'break' skips that increment, so each counter ends up as the number of
       consecutive non-zero-id slots before the first empty slot
    */
    _sg_buffer* vbs[SG_MAX_SHADERSTAGE_BUFFERS] = { 0 };
    int num_vbs = 0;
    for (int i = 0; i < SG_MAX_SHADERSTAGE_BUFFERS; i++, num_vbs++) {
        if (ds->vertex_buffers[i].id) {
            vbs[i] = _sg_lookup_buffer(&_sg.pools, ds->vertex_buffers[i].id);
            SOKOL_ASSERT(vbs[i]);
            _sg.next_draw_valid &= (SG_RESOURCESTATE_VALID == vbs[i]->slot.state);
        }
        else {
            break;
        }
    }
    /* the index buffer is optional (id 0 means non-indexed rendering) */
    _sg_buffer* ib = 0;
    if (ds->index_buffer.id) {
        ib = _sg_lookup_buffer(&_sg.pools, ds->index_buffer.id);
        SOKOL_ASSERT(ib);
        _sg.next_draw_valid &= (SG_RESOURCESTATE_VALID == ib->slot.state);
    }
    /* vertex-stage images */
    _sg_image* vs_imgs[SG_MAX_SHADERSTAGE_IMAGES] = { 0 };
    int num_vs_imgs = 0;
    for (int i = 0; i < SG_MAX_SHADERSTAGE_IMAGES; i++, num_vs_imgs++) {
        if (ds->vs_images[i].id) {
            vs_imgs[i] = _sg_lookup_image(&_sg.pools, ds->vs_images[i].id);
            SOKOL_ASSERT(vs_imgs[i]);
            _sg.next_draw_valid &= (SG_RESOURCESTATE_VALID == vs_imgs[i]->slot.state);
        }
        else {
            break;
        }
    }
    /* fragment-stage images */
    _sg_image* fs_imgs[SG_MAX_SHADERSTAGE_IMAGES] = { 0 };
    int num_fs_imgs = 0;
    for (int i = 0; i < SG_MAX_SHADERSTAGE_IMAGES; i++, num_fs_imgs++) {
        if (ds->fs_images[i].id) {
            fs_imgs[i] = _sg_lookup_image(&_sg.pools, ds->fs_images[i].id);
            SOKOL_ASSERT(fs_imgs[i]);
            _sg.next_draw_valid &= (SG_RESOURCESTATE_VALID == fs_imgs[i]->slot.state);
        }
        else {
            break;
        }
    }
    /* only forward to the backend when every referenced resource is valid */
    if (_sg.next_draw_valid) {
        _sg_apply_draw_state(pip, vbs, num_vbs, ib, vs_imgs, num_vs_imgs, fs_imgs, num_fs_imgs);
    }
}
  7876. void sg_apply_uniform_block(sg_shader_stage stage, int ub_index, const void* data, int num_bytes) {
  7877. SOKOL_ASSERT((stage == SG_SHADERSTAGE_VS) || (stage == SG_SHADERSTAGE_FS));
  7878. SOKOL_ASSERT((ub_index >= 0) && (ub_index < SG_MAX_SHADERSTAGE_UBS));
  7879. SOKOL_ASSERT(data && (num_bytes > 0));
  7880. if (!_sg_validate_apply_uniform_block(stage, ub_index, data, num_bytes)) {
  7881. _sg.next_draw_valid = false;
  7882. return;
  7883. }
  7884. if (!(_sg.pass_valid && _sg.next_draw_valid)) {
  7885. return;
  7886. }
  7887. _sg_apply_uniform_block(stage, ub_index, data, num_bytes);
  7888. }
  7889. void sg_draw(int base_element, int num_elements, int num_instances) {
  7890. if (!(_sg.pass_valid && _sg.next_draw_valid)) {
  7891. return;
  7892. }
  7893. _sg_draw(base_element, num_elements, num_instances);
  7894. }
  7895. void sg_end_pass() {
  7896. if (!_sg.pass_valid) {
  7897. return;
  7898. }
  7899. _sg_end_pass();
  7900. _sg.cur_pass.id = SG_INVALID_ID;
  7901. _sg.cur_pipeline.id = SG_INVALID_ID;
  7902. _sg.pass_valid = false;
  7903. }
/* finish the current frame: flush backend work and advance the frame counter
   (the counter is what sg_update_buffer/sg_update_image compare against to
   assert at most one update per resource per frame)
*/
void sg_commit() {
    _sg_commit();
    _sg.frame_index++;
}
/* reset the backend's internal state cache (pure forward to the backend;
   presumably needed after 3D-API calls made outside of this library —
   confirm against the backend implementation)
*/
void sg_reset_state_cache() {
    _sg_reset_state_cache();
}
  7911. void sg_update_buffer(sg_buffer buf_id, const void* data, int num_bytes) {
  7912. if (num_bytes == 0) {
  7913. return;
  7914. }
  7915. _sg_buffer* buf = _sg_lookup_buffer(&_sg.pools, buf_id.id);
  7916. if (!(buf && buf->slot.state == SG_RESOURCESTATE_VALID)) {
  7917. return;
  7918. }
  7919. if (_sg_validate_update_buffer(buf, data, num_bytes)) {
  7920. SOKOL_ASSERT(buf->upd_frame_index != _sg.frame_index);
  7921. _sg_update_buffer(buf, data, num_bytes);
  7922. buf->upd_frame_index = _sg.frame_index;
  7923. }
  7924. }
  7925. void sg_update_image(sg_image img_id, const sg_image_content* data) {
  7926. _sg_image* img = _sg_lookup_image(&_sg.pools, img_id.id);
  7927. if (!(img && img->slot.state == SG_RESOURCESTATE_VALID)) {
  7928. return;
  7929. }
  7930. if (_sg_validate_update_image(img, data)) {
  7931. SOKOL_ASSERT(img->upd_frame_index != _sg.frame_index);
  7932. _sg_update_image(img, data);
  7933. img->upd_frame_index = _sg.frame_index;
  7934. }
  7935. }
  7936. #ifdef __cplusplus
  7937. } /* extern "C" */
  7938. #endif
  7939. #endif /* SOKOL_IMPL */