vulkan_raii.hpp 1.2 MB

14,308 lines (contents not shown)
430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
07082070920710207112071220713207142071520716207172071820719207202072120722207232072420725207262072720728207292073020731207322073320734207352073620737207382073920740207412074220743207442074520746207472074820749207502075120752207532075420755207562075720758207592076020761207622076320764207652076620767207682076920770207712077220773207742077520776207772077820779207802078120782207832078420785207862078720788207892079020791207922079320794207952079620797207982079920800208012080220803208042080520806208072080820809208102081120812208132081420815208162081720818208192082020821208222082320824208252082620827208282082920830208312083220833208342083520836208372083820839208402084120842208432084420845208462084720848208492085020851208522085320854208552085620857208582085920860208612086220863208642086520866208672086820869208702087120872208732087420875208762087720878208792088020881208822088320884208852088620887
// Copyright 2015-2023 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_RAII_HPP
#define VULKAN_RAII_HPP
#include <memory>
#include <utility>  // std::exchange, std::forward
#include <vulkan/vulkan.hpp>
#if !defined( VULKAN_HPP_RAII_NAMESPACE )
# define VULKAN_HPP_RAII_NAMESPACE raii
#endif
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
namespace VULKAN_HPP_NAMESPACE
{
  namespace VULKAN_HPP_RAII_NAMESPACE
  {
    template <class T, class U = T>
    VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_INLINE T exchange( T & obj, U && newValue )
    {
# if ( 14 <= VULKAN_HPP_CPP_VERSION )
      return std::exchange<T>( obj, std::forward<U>( newValue ) );
# else
      T oldValue = std::move( obj );
      obj        = std::forward<U>( newValue );
      return oldValue;
# endif
    }
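    // Note (editorial): the RAII handle wrappers later in this header use this exchange()
    // helper in their move constructors and move assignment operators, e.g.
    //   m_instance = exchange( rhs.m_instance, {} );
    // so the moved-from object is reset to a null handle. On C++14 and later it simply
    // forwards to std::exchange; the fallback branch reimplements the same semantics.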
    class ContextDispatcher : public DispatchLoaderBase
    {
    public:
      ContextDispatcher( PFN_vkGetInstanceProcAddr getProcAddr )
        : vkGetInstanceProcAddr( getProcAddr )
        //=== VK_VERSION_1_0 ===
        , vkCreateInstance( PFN_vkCreateInstance( getProcAddr( NULL, "vkCreateInstance" ) ) )
        , vkEnumerateInstanceExtensionProperties( PFN_vkEnumerateInstanceExtensionProperties( getProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) ) )
        , vkEnumerateInstanceLayerProperties( PFN_vkEnumerateInstanceLayerProperties( getProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) ) )
        //=== VK_VERSION_1_1 ===
        , vkEnumerateInstanceVersion( PFN_vkEnumerateInstanceVersion( getProcAddr( NULL, "vkEnumerateInstanceVersion" ) ) )
      {
      }
    public:
      PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
      //=== VK_VERSION_1_0 ===
      PFN_vkCreateInstance vkCreateInstance = 0;
      PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
      PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
      //=== VK_VERSION_1_1 ===
      PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
    };
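    // ContextDispatcher only resolves the global (instance-less) entry points, which is why
    // every getProcAddr call above passes NULL for the VkInstance argument.
    // Illustrative sketch only (not part of the generated header): applications normally never
    // construct a dispatcher directly; it is owned by the RAII wrappers, roughly:
    //
    //   vk::raii::Context  context;                          // loads the ContextDispatcher
    //   vk::raii::Instance instance( context, createInfo );  // loads an InstanceDispatcher
    //
    // where createInfo is a vk::InstanceCreateInfo filled in by the application and "vk" is the
    // default VULKAN_HPP_NAMESPACE.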
    class InstanceDispatcher : public DispatchLoaderBase
    {
    public:
      InstanceDispatcher( PFN_vkGetInstanceProcAddr getProcAddr, VkInstance instance ) : vkGetInstanceProcAddr( getProcAddr )
      {
        //=== VK_VERSION_1_0 ===
        vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
        vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
        vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
        vkGetPhysicalDeviceFormatProperties =
          PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
        vkGetPhysicalDeviceImageFormatProperties =
          PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
        vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
        vkGetPhysicalDeviceQueueFamilyProperties =
          PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
        vkGetPhysicalDeviceMemoryProperties =
          PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
        vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
        vkEnumerateDeviceExtensionProperties =
          PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
        vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
        vkGetPhysicalDeviceSparseImageFormatProperties =
          PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
        //=== VK_VERSION_1_1 ===
        vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
        vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
        vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
        vkGetPhysicalDeviceFormatProperties2 =
          PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
        vkGetPhysicalDeviceImageFormatProperties2 =
          PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
        vkGetPhysicalDeviceQueueFamilyProperties2 =
          PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
        vkGetPhysicalDeviceMemoryProperties2 =
          PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
        vkGetPhysicalDeviceSparseImageFormatProperties2 =
          PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
        vkGetPhysicalDeviceExternalBufferProperties =
          PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
        vkGetPhysicalDeviceExternalFenceProperties =
          PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
        vkGetPhysicalDeviceExternalSemaphoreProperties =
          PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
        //=== VK_VERSION_1_3 ===
        vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
        //=== VK_KHR_surface ===
        vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
        vkGetPhysicalDeviceSurfaceSupportKHR =
          PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
        vkGetPhysicalDeviceSurfaceCapabilitiesKHR =
          PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
        vkGetPhysicalDeviceSurfaceFormatsKHR =
          PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
        vkGetPhysicalDeviceSurfacePresentModesKHR =
          PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
        //=== VK_KHR_swapchain ===
        vkGetPhysicalDevicePresentRectanglesKHR =
          PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
        //=== VK_KHR_display ===
        vkGetPhysicalDeviceDisplayPropertiesKHR =
          PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
        vkGetPhysicalDeviceDisplayPlanePropertiesKHR =
          PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
        vkGetDisplayPlaneSupportedDisplaysKHR =
          PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
        vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
        vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
        vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
        vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
# if defined( VK_USE_PLATFORM_XLIB_KHR )
        //=== VK_KHR_xlib_surface ===
        vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
        vkGetPhysicalDeviceXlibPresentationSupportKHR =
          PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
        //=== VK_KHR_xcb_surface ===
        vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
        vkGetPhysicalDeviceXcbPresentationSupportKHR =
          PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
        //=== VK_KHR_wayland_surface ===
        vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
        vkGetPhysicalDeviceWaylandPresentationSupportKHR =
          PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
        //=== VK_KHR_android_surface ===
        vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
        //=== VK_KHR_win32_surface ===
        vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
        vkGetPhysicalDeviceWin32PresentationSupportKHR =
          PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
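        // Note (editorial): the window-system-integration (WSI) surface entry points above and
        // below are only resolved when the corresponding VK_USE_PLATFORM_* macro is defined at
        // build time, so a dispatcher built for one platform carries no pointers for the others.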
        //=== VK_EXT_debug_report ===
        vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
        vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
        vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
        //=== VK_KHR_video_queue ===
        vkGetPhysicalDeviceVideoCapabilitiesKHR =
          PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
        vkGetPhysicalDeviceVideoFormatPropertiesKHR =
          PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
# if defined( VK_USE_PLATFORM_GGP )
        //=== VK_GGP_stream_descriptor_surface ===
        vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
# endif /*VK_USE_PLATFORM_GGP*/
        //=== VK_NV_external_memory_capabilities ===
        vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
          PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
        //=== VK_KHR_get_physical_device_properties2 ===
        vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
        if ( !vkGetPhysicalDeviceFeatures2 )
          vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
        vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceProperties2 )
          vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
        vkGetPhysicalDeviceFormatProperties2KHR =
          PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceFormatProperties2 )
          vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
        vkGetPhysicalDeviceImageFormatProperties2KHR =
          PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceImageFormatProperties2 )
          vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
        vkGetPhysicalDeviceQueueFamilyProperties2KHR =
          PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceQueueFamilyProperties2 )
          vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
        vkGetPhysicalDeviceMemoryProperties2KHR =
          PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceMemoryProperties2 )
          vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
        vkGetPhysicalDeviceSparseImageFormatProperties2KHR =
          PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
        if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 )
          vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
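        // Pattern used throughout this constructor: when an extension-suffixed entry point
        // (e.g. vkGetPhysicalDeviceFeatures2KHR) is available but the promoted core entry point
        // was not resolved (e.g. on a Vulkan 1.0 instance), the core function pointer is aliased
        // to the extension one, so calls can always be dispatched through the core name.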
# if defined( VK_USE_PLATFORM_VI_NN )
        //=== VK_NN_vi_surface ===
        vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
# endif /*VK_USE_PLATFORM_VI_NN*/
        //=== VK_KHR_device_group_creation ===
        vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
        if ( !vkEnumeratePhysicalDeviceGroups )
          vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
        //=== VK_KHR_external_memory_capabilities ===
        vkGetPhysicalDeviceExternalBufferPropertiesKHR =
          PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
        if ( !vkGetPhysicalDeviceExternalBufferProperties )
          vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
        //=== VK_KHR_external_semaphore_capabilities ===
        vkGetPhysicalDeviceExternalSemaphorePropertiesKHR =
          PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
        if ( !vkGetPhysicalDeviceExternalSemaphoreProperties )
          vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
        //=== VK_EXT_direct_mode_display ===
        vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
        //=== VK_EXT_acquire_xlib_display ===
        vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
        vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
        //=== VK_EXT_display_surface_counter ===
        vkGetPhysicalDeviceSurfaceCapabilities2EXT =
          PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
        //=== VK_KHR_external_fence_capabilities ===
        vkGetPhysicalDeviceExternalFencePropertiesKHR =
          PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
        if ( !vkGetPhysicalDeviceExternalFenceProperties )
          vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
        //=== VK_KHR_performance_query ===
        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
        vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
          vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
        //=== VK_KHR_get_surface_capabilities2 ===
        vkGetPhysicalDeviceSurfaceCapabilities2KHR =
          PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
        vkGetPhysicalDeviceSurfaceFormats2KHR =
          PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
        //=== VK_KHR_get_display_properties2 ===
        vkGetPhysicalDeviceDisplayProperties2KHR =
          PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
        vkGetPhysicalDeviceDisplayPlaneProperties2KHR =
          PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
        vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
        vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
# if defined( VK_USE_PLATFORM_IOS_MVK )
        //=== VK_MVK_ios_surface ===
        vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
        //=== VK_MVK_macos_surface ===
        vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
        //=== VK_EXT_debug_utils ===
        vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
        vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
        vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
        //=== VK_EXT_sample_locations ===
        vkGetPhysicalDeviceMultisamplePropertiesEXT =
          PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
        //=== VK_EXT_calibrated_timestamps ===
        vkGetPhysicalDeviceCalibrateableTimeDomainsEXT =
          PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
# if defined( VK_USE_PLATFORM_FUCHSIA )
        //=== VK_FUCHSIA_imagepipe_surface ===
        vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
        //=== VK_EXT_metal_surface ===
        vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
# endif /*VK_USE_PLATFORM_METAL_EXT*/
        //=== VK_KHR_fragment_shading_rate ===
        vkGetPhysicalDeviceFragmentShadingRatesKHR =
          PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
        //=== VK_EXT_tooling_info ===
        vkGetPhysicalDeviceToolPropertiesEXT =
          PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
        if ( !vkGetPhysicalDeviceToolProperties )
          vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
        //=== VK_NV_cooperative_matrix ===
        vkGetPhysicalDeviceCooperativeMatrixPropertiesNV =
          PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
        //=== VK_NV_coverage_reduction_mode ===
        vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
# if defined( VK_USE_PLATFORM_WIN32_KHR )
        //=== VK_EXT_full_screen_exclusive ===
        vkGetPhysicalDeviceSurfacePresentModes2EXT =
          PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
        //=== VK_EXT_headless_surface ===
        vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
        //=== VK_EXT_acquire_drm_display ===
        vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
        vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
# if defined( VK_ENABLE_BETA_EXTENSIONS )
        //=== VK_KHR_video_encode_queue ===
        vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
          vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR" ) );
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
        //=== VK_NV_acquire_winrt_display ===
        vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
        vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
        //=== VK_EXT_directfb_surface ===
        vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
        vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
          PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  309. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  310. //=== VK_QNX_screen_surface ===
  311. vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
  312. vkGetPhysicalDeviceScreenPresentationSupportQNX =
  313. PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
  314. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  315. //=== VK_NV_optical_flow ===
  316. vkGetPhysicalDeviceOpticalFlowImageFormatsNV =
  317. PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
  318. //=== VK_KHR_cooperative_matrix ===
  319. vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR =
  320. PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR" ) );
  321. vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
  322. }
  323. public:
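// Instance-level function pointers, zero-initialized and resolved by the constructor above.
// Entries guarded by a platform define fall back to a PFN_dummy placeholder in the #else branch,
// presumably so that the size and layout of this class stay the same regardless of which
// VK_USE_PLATFORM_* macros are enabled.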
  324. //=== VK_VERSION_1_0 ===
  325. PFN_vkDestroyInstance vkDestroyInstance = 0;
  326. PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
  327. PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
  328. PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
  329. PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
  330. PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
  331. PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
  332. PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
  333. PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
  334. PFN_vkCreateDevice vkCreateDevice = 0;
  335. PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
  336. PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
  337. PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
  338. //=== VK_VERSION_1_1 ===
  339. PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
  340. PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
  341. PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
  342. PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
  343. PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
  344. PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
  345. PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
  346. PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
  347. PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
  348. PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
  349. PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
  350. //=== VK_VERSION_1_3 ===
  351. PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
  352. //=== VK_KHR_surface ===
  353. PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
  354. PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
  355. PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
  356. PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
  357. PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
  358. //=== VK_KHR_swapchain ===
  359. PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
  360. //=== VK_KHR_display ===
  361. PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
  362. PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
  363. PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
  364. PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
  365. PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
  366. PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
  367. PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
  368. # if defined( VK_USE_PLATFORM_XLIB_KHR )
  369. //=== VK_KHR_xlib_surface ===
  370. PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
  371. PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
  372. # else
  373. PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
  374. PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
  375. # endif /*VK_USE_PLATFORM_XLIB_KHR*/
  376. # if defined( VK_USE_PLATFORM_XCB_KHR )
  377. //=== VK_KHR_xcb_surface ===
  378. PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
  379. PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
  380. # else
  381. PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
  382. PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
  383. # endif /*VK_USE_PLATFORM_XCB_KHR*/
  384. # if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  385. //=== VK_KHR_wayland_surface ===
  386. PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
  387. PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
  388. # else
  389. PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
  390. PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
  391. # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  392. # if defined( VK_USE_PLATFORM_ANDROID_KHR )
  393. //=== VK_KHR_android_surface ===
  394. PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
  395. # else
  396. PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
  397. # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  398. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  399. //=== VK_KHR_win32_surface ===
  400. PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
  401. PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
  402. # else
  403. PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
  404. PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
  405. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  406. //=== VK_EXT_debug_report ===
  407. PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
  408. PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
  409. PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
  410. //=== VK_KHR_video_queue ===
  411. PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
  412. PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
  413. # if defined( VK_USE_PLATFORM_GGP )
  414. //=== VK_GGP_stream_descriptor_surface ===
  415. PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
  416. # else
  417. PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
  418. # endif /*VK_USE_PLATFORM_GGP*/
  419. //=== VK_NV_external_memory_capabilities ===
  420. PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
  421. //=== VK_KHR_get_physical_device_properties2 ===
  422. PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
  423. PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
  424. PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
  425. PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
  426. PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
  427. PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
  428. PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
  429. # if defined( VK_USE_PLATFORM_VI_NN )
  430. //=== VK_NN_vi_surface ===
  431. PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
  432. # else
  433. PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
  434. # endif /*VK_USE_PLATFORM_VI_NN*/
  435. //=== VK_KHR_device_group_creation ===
  436. PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
  437. //=== VK_KHR_external_memory_capabilities ===
  438. PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
  439. //=== VK_KHR_external_semaphore_capabilities ===
  440. PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
  441. //=== VK_EXT_direct_mode_display ===
  442. PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
  443. # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  444. //=== VK_EXT_acquire_xlib_display ===
  445. PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
  446. PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
  447. # else
  448. PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
  449. PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
  450. # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  451. //=== VK_EXT_display_surface_counter ===
  452. PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
  453. //=== VK_KHR_external_fence_capabilities ===
  454. PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
  455. //=== VK_KHR_performance_query ===
  456. PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
  457. PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
  458. //=== VK_KHR_get_surface_capabilities2 ===
  459. PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
  460. PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
  461. //=== VK_KHR_get_display_properties2 ===
  462. PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
  463. PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
  464. PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
  465. PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
  466. # if defined( VK_USE_PLATFORM_IOS_MVK )
  467. //=== VK_MVK_ios_surface ===
  468. PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
  469. # else
  470. PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
  471. # endif /*VK_USE_PLATFORM_IOS_MVK*/
  472. # if defined( VK_USE_PLATFORM_MACOS_MVK )
  473. //=== VK_MVK_macos_surface ===
  474. PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
  475. # else
  476. PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
  477. # endif /*VK_USE_PLATFORM_MACOS_MVK*/
  478. //=== VK_EXT_debug_utils ===
  479. PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
  480. PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
  481. PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
  482. //=== VK_EXT_sample_locations ===
  483. PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
  484. //=== VK_EXT_calibrated_timestamps ===
  485. PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
  486. # if defined( VK_USE_PLATFORM_FUCHSIA )
  487. //=== VK_FUCHSIA_imagepipe_surface ===
  488. PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
  489. # else
  490. PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
  491. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  492. # if defined( VK_USE_PLATFORM_METAL_EXT )
  493. //=== VK_EXT_metal_surface ===
  494. PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
  495. # else
  496. PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
  497. # endif /*VK_USE_PLATFORM_METAL_EXT*/
  498. //=== VK_KHR_fragment_shading_rate ===
  499. PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
  500. //=== VK_EXT_tooling_info ===
  501. PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
  502. //=== VK_NV_cooperative_matrix ===
  503. PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
  504. //=== VK_NV_coverage_reduction_mode ===
  505. PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
  506. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  507. //=== VK_EXT_full_screen_exclusive ===
  508. PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
  509. # else
  510. PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
  511. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  512. //=== VK_EXT_headless_surface ===
  513. PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
  514. //=== VK_EXT_acquire_drm_display ===
  515. PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
  516. PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
  517. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  518. //=== VK_KHR_video_encode_queue ===
  519. PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR = 0;
  520. # else
  521. PFN_dummy vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR_placeholder = 0;
  522. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  523. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  524. //=== VK_NV_acquire_winrt_display ===
  525. PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
  526. PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
  527. # else
  528. PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
  529. PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
  530. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  531. # if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  532. //=== VK_EXT_directfb_surface ===
  533. PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
  534. PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
  535. # else
  536. PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
  537. PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
  538. # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  539. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  540. //=== VK_QNX_screen_surface ===
  541. PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
  542. PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
  543. # else
  544. PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
  545. PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
  546. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  547. //=== VK_NV_optical_flow ===
  548. PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
  549. //=== VK_KHR_cooperative_matrix ===
  550. PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = 0;
  551. PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
  552. };
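// Illustrative sketch (not part of the generated code): the RAII handle wrappers construct these
// dispatchers roughly as follows, assuming `instance` and `device` handles and the loader's
// vkGetInstanceProcAddr are already available:
//
//   InstanceDispatcher instanceDispatcher( vkGetInstanceProcAddr, instance );
//   DeviceDispatcher   deviceDispatcher( instanceDispatcher.vkGetDeviceProcAddr, device );
//
// Each constructor resolves every entry point once, so subsequent calls go through plain
// function pointers without any per-call lookup.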
  553. class DeviceDispatcher : public DispatchLoaderBase
  554. {
  555. public:
  556. DeviceDispatcher( PFN_vkGetDeviceProcAddr getProcAddr, VkDevice device ) : vkGetDeviceProcAddr( getProcAddr )
  557. {
  558. //=== VK_VERSION_1_0 ===
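// vkGetDeviceProcAddr is re-resolved through the device first, presumably to replace the loader's
// instance-level trampoline with a device-specific pointer where the implementation provides one;
// the remaining device entry points below are then fetched through that member.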
  559. vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
  560. vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
  561. vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
  562. vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
  563. vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
  564. vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
  565. vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
  566. vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
  567. vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
  568. vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
  569. vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
  570. vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
  571. vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
  572. vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
  573. vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
  574. vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
  575. vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
  576. vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
  577. vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
  578. vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
  579. vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
  580. vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
  581. vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
  582. vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
  583. vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
  584. vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
  585. vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
  586. vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
  587. vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
  588. vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
  589. vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
  590. vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
  591. vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
  592. vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
  593. vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
  594. vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
  595. vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
  596. vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
  597. vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
  598. vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
  599. vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
  600. vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
  601. vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
  602. vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
  603. vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
  604. vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
  605. vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
  606. vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
  607. vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
  608. vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
  609. vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
  610. vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
  611. vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
  612. vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
  613. vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
  614. vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
  615. vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
  616. vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
  617. vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
  618. vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
  619. vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
  620. vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
  621. vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
  622. vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
  623. vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
  624. vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
  625. vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
  626. vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
  627. vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
  628. vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
  629. vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
  630. vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
  631. vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
  632. vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
  633. vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
  634. vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
  635. vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
  636. vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
  637. vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
  638. vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
  639. vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
  640. vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
  641. vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
  642. vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
  643. vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
  644. vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
  645. vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
  646. vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
  647. vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
  648. vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
  649. vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
  650. vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
  651. vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
  652. vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
  653. vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
  654. vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
  655. vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
  656. vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
  657. vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
  658. vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
  659. vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
  660. vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
  661. vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
  662. vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
  663. vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
  664. vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
  665. vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
  666. vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
  667. vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
  668. vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
  669. vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
  670. vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
  671. vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
  672. vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
  673. vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
  674. vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
  675. vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
  676. vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
  677. vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
  678. vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
  679. vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
  680. //=== VK_VERSION_1_1 ===
  681. vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
  682. vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
  683. vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
  684. vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
  685. vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
  686. vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
  687. vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
  688. vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
  689. vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
  690. vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
  691. vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
  692. vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
  693. vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
  694. vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
  695. vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
  696. vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
  697. //=== VK_VERSION_1_2 ===
  698. vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
  699. vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
  700. vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
  701. vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
  702. vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
  703. vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
  704. vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
  705. vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
  706. vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
  707. vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
  708. vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
  709. vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
  710. vkGetDeviceMemoryOpaqueCaptureAddress =
  711. PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
  712. //=== VK_VERSION_1_3 ===
  713. vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
  714. vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
  715. vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
  716. vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
  717. vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
  718. vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
  719. vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
  720. vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
  721. vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
  722. vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
  723. vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
  724. vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
  725. vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
  726. vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
  727. vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
  728. vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
  729. vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
  730. vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
  731. vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
  732. vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
  733. vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
  734. vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
  735. vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
  736. vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
  737. vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
  738. vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
  739. vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
  740. vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
  741. vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
  742. vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
  743. vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
  744. vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
  745. vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
  746. vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
  747. vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
  748. vkGetDeviceImageSparseMemoryRequirements =
  749. PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
  750. //=== VK_KHR_swapchain ===
  751. vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
  752. vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
  753. vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
  754. vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
  755. vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
  756. vkGetDeviceGroupPresentCapabilitiesKHR =
  757. PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
  758. vkGetDeviceGroupSurfacePresentModesKHR =
  759. PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
  760. vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
  761. //=== VK_KHR_display_swapchain ===
  762. vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
  763. //=== VK_EXT_debug_marker ===
  764. vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
  765. vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
  766. vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
  767. vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
  768. vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
  769. //=== VK_KHR_video_queue ===
  770. vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
  771. vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
  772. vkGetVideoSessionMemoryRequirementsKHR =
  773. PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
  774. vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
  775. vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
  776. vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
  777. vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
  778. vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
  779. vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
  780. vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
  781. //=== VK_KHR_video_decode_queue ===
  782. vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
  783. //=== VK_EXT_transform_feedback ===
  784. vkCmdBindTransformFeedbackBuffersEXT =
  785. PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
  786. vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
  787. vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
  788. vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
  789. vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
  790. vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
  791. //=== VK_NVX_binary_import ===
  792. vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
  793. vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
  794. vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
  795. vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
  796. vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
  797. //=== VK_NVX_image_view_handle ===
  798. vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
  799. vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
  800. //=== VK_AMD_draw_indirect_count ===
  801. vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
  802. if ( !vkCmdDrawIndirectCount )
  803. vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
  804. vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
  805. if ( !vkCmdDrawIndexedIndirectCount )
  806. vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
  807. //=== VK_AMD_shader_info ===
  808. vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
  809. //=== VK_KHR_dynamic_rendering ===
  810. vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
  811. if ( !vkCmdBeginRendering )
  812. vkCmdBeginRendering = vkCmdBeginRenderingKHR;
  813. vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
  814. if ( !vkCmdEndRendering )
  815. vkCmdEndRendering = vkCmdEndRenderingKHR;
  816. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  817. //=== VK_NV_external_memory_win32 ===
  818. vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
  819. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  820. //=== VK_KHR_device_group ===
  821. vkGetDeviceGroupPeerMemoryFeaturesKHR =
  822. PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
  823. if ( !vkGetDeviceGroupPeerMemoryFeatures )
  824. vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
  825. vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
  826. if ( !vkCmdSetDeviceMask )
  827. vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
  828. vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
  829. if ( !vkCmdDispatchBase )
  830. vkCmdDispatchBase = vkCmdDispatchBaseKHR;
  831. //=== VK_KHR_maintenance1 ===
  832. vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
  833. if ( !vkTrimCommandPool )
  834. vkTrimCommandPool = vkTrimCommandPoolKHR;
  835. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  836. //=== VK_KHR_external_memory_win32 ===
  837. vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
  838. vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
  839. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  840. //=== VK_KHR_external_memory_fd ===
  841. vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
  842. vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
  843. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  844. //=== VK_KHR_external_semaphore_win32 ===
  845. vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
  846. vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
  847. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  848. //=== VK_KHR_external_semaphore_fd ===
  849. vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
  850. vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
  851. //=== VK_KHR_push_descriptor ===
  852. vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
  853. vkCmdPushDescriptorSetWithTemplateKHR =
  854. PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
  855. //=== VK_EXT_conditional_rendering ===
  856. vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
  857. vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
  858. //=== VK_KHR_descriptor_update_template ===
  859. vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
  860. if ( !vkCreateDescriptorUpdateTemplate )
  861. vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
  862. vkDestroyDescriptorUpdateTemplateKHR =
  863. PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
  864. if ( !vkDestroyDescriptorUpdateTemplate )
  865. vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
  866. vkUpdateDescriptorSetWithTemplateKHR =
  867. PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
  868. if ( !vkUpdateDescriptorSetWithTemplate )
  869. vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
  870. //=== VK_NV_clip_space_w_scaling ===
  871. vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
  872. //=== VK_EXT_display_control ===
  873. vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
  874. vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
  875. vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
  876. vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
  877. //=== VK_GOOGLE_display_timing ===
  878. vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
  879. vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
  880. //=== VK_EXT_discard_rectangles ===
  881. vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
  882. vkCmdSetDiscardRectangleEnableEXT = PFN_vkCmdSetDiscardRectangleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEnableEXT" ) );
  883. vkCmdSetDiscardRectangleModeEXT = PFN_vkCmdSetDiscardRectangleModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleModeEXT" ) );
  884. //=== VK_EXT_hdr_metadata ===
  885. vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
  886. //=== VK_KHR_create_renderpass2 ===
  887. vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
  888. if ( !vkCreateRenderPass2 )
  889. vkCreateRenderPass2 = vkCreateRenderPass2KHR;
  890. vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
  891. if ( !vkCmdBeginRenderPass2 )
  892. vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
  893. vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
  894. if ( !vkCmdNextSubpass2 )
  895. vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
  896. vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
  897. if ( !vkCmdEndRenderPass2 )
  898. vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
  899. //=== VK_KHR_shared_presentable_image ===
  900. vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
  901. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  902. //=== VK_KHR_external_fence_win32 ===
  903. vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
  904. vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
  905. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  906. //=== VK_KHR_external_fence_fd ===
  907. vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
  908. vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
  909. //=== VK_KHR_performance_query ===
  910. vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
  911. vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
  912. //=== VK_EXT_debug_utils ===
  913. vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
  914. vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
  915. vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
  916. vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
  917. vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
  918. vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
  919. vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
  920. vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
  921. # if defined( VK_USE_PLATFORM_ANDROID_KHR )
  922. //=== VK_ANDROID_external_memory_android_hardware_buffer ===
  923. vkGetAndroidHardwareBufferPropertiesANDROID =
  924. PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
  925. vkGetMemoryAndroidHardwareBufferANDROID =
  926. PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
  927. # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  928. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  929. //=== VK_AMDX_shader_enqueue ===
  930. vkCreateExecutionGraphPipelinesAMDX = PFN_vkCreateExecutionGraphPipelinesAMDX( vkGetDeviceProcAddr( device, "vkCreateExecutionGraphPipelinesAMDX" ) );
  931. vkGetExecutionGraphPipelineScratchSizeAMDX =
  932. PFN_vkGetExecutionGraphPipelineScratchSizeAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineScratchSizeAMDX" ) );
  933. vkGetExecutionGraphPipelineNodeIndexAMDX =
  934. PFN_vkGetExecutionGraphPipelineNodeIndexAMDX( vkGetDeviceProcAddr( device, "vkGetExecutionGraphPipelineNodeIndexAMDX" ) );
  935. vkCmdInitializeGraphScratchMemoryAMDX =
  936. PFN_vkCmdInitializeGraphScratchMemoryAMDX( vkGetDeviceProcAddr( device, "vkCmdInitializeGraphScratchMemoryAMDX" ) );
  937. vkCmdDispatchGraphAMDX = PFN_vkCmdDispatchGraphAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphAMDX" ) );
  938. vkCmdDispatchGraphIndirectAMDX = PFN_vkCmdDispatchGraphIndirectAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectAMDX" ) );
  939. vkCmdDispatchGraphIndirectCountAMDX = PFN_vkCmdDispatchGraphIndirectCountAMDX( vkGetDeviceProcAddr( device, "vkCmdDispatchGraphIndirectCountAMDX" ) );
  940. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  941. //=== VK_EXT_sample_locations ===
  942. vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
  943. //=== VK_KHR_get_memory_requirements2 ===
  944. vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
  945. if ( !vkGetImageMemoryRequirements2 )
  946. vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
  947. vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
  948. if ( !vkGetBufferMemoryRequirements2 )
  949. vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
  950. vkGetImageSparseMemoryRequirements2KHR =
  951. PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
  952. if ( !vkGetImageSparseMemoryRequirements2 )
  953. vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
  954. //=== VK_KHR_acceleration_structure ===
  955. vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
  956. vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
  957. vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
  958. vkCmdBuildAccelerationStructuresIndirectKHR =
  959. PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
  960. vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
  961. vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
  962. vkCopyAccelerationStructureToMemoryKHR =
  963. PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
  964. vkCopyMemoryToAccelerationStructureKHR =
  965. PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
  966. vkWriteAccelerationStructuresPropertiesKHR =
  967. PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
  968. vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
  969. vkCmdCopyAccelerationStructureToMemoryKHR =
  970. PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
  971. vkCmdCopyMemoryToAccelerationStructureKHR =
  972. PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
  973. vkGetAccelerationStructureDeviceAddressKHR =
  974. PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
  975. vkCmdWriteAccelerationStructuresPropertiesKHR =
  976. PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
  977. vkGetDeviceAccelerationStructureCompatibilityKHR =
  978. PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
  979. vkGetAccelerationStructureBuildSizesKHR =
  980. PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
  981. //=== VK_KHR_ray_tracing_pipeline ===
  982. vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
  983. vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
  984. vkGetRayTracingShaderGroupHandlesKHR =
  985. PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
  986. vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
  987. PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
  988. vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
  989. vkGetRayTracingShaderGroupStackSizeKHR =
  990. PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
  991. vkCmdSetRayTracingPipelineStackSizeKHR =
  992. PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
  993. //=== VK_KHR_sampler_ycbcr_conversion ===
  994. vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
  995. if ( !vkCreateSamplerYcbcrConversion )
  996. vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
  997. vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
  998. if ( !vkDestroySamplerYcbcrConversion )
  999. vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
  1000. //=== VK_KHR_bind_memory2 ===
  1001. vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
  1002. if ( !vkBindBufferMemory2 )
  1003. vkBindBufferMemory2 = vkBindBufferMemory2KHR;
  1004. vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
  1005. if ( !vkBindImageMemory2 )
  1006. vkBindImageMemory2 = vkBindImageMemory2KHR;
  1007. //=== VK_EXT_image_drm_format_modifier ===
  1008. vkGetImageDrmFormatModifierPropertiesEXT =
  1009. PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
  1010. //=== VK_EXT_validation_cache ===
  1011. vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
  1012. vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
  1013. vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
  1014. vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
  1015. //=== VK_NV_shading_rate_image ===
  1016. vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
  1017. vkCmdSetViewportShadingRatePaletteNV =
  1018. PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
  1019. vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
  1020. //=== VK_NV_ray_tracing ===
  1021. vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
  1022. vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
  1023. vkGetAccelerationStructureMemoryRequirementsNV =
  1024. PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
  1025. vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
  1026. vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
  1027. vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
  1028. vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
  1029. vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
  1030. vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
  1031. if ( !vkGetRayTracingShaderGroupHandlesKHR )
  1032. vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
  1033. vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
  1034. vkCmdWriteAccelerationStructuresPropertiesNV =
  1035. PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
  1036. vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
  1037. //=== VK_KHR_maintenance3 ===
  1038. vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
  1039. if ( !vkGetDescriptorSetLayoutSupport )
  1040. vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
  1041. //=== VK_KHR_draw_indirect_count ===
  1042. vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
  1043. if ( !vkCmdDrawIndirectCount )
  1044. vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
  1045. vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
  1046. if ( !vkCmdDrawIndexedIndirectCount )
  1047. vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
  1048. //=== VK_EXT_external_memory_host ===
  1049. vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
  1050. //=== VK_AMD_buffer_marker ===
  1051. vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
  1052. //=== VK_EXT_calibrated_timestamps ===
  1053. vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
  1054. //=== VK_NV_mesh_shader ===
  1055. vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
  1056. vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
  1057. vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
  1058. //=== VK_NV_scissor_exclusive ===
  1059. vkCmdSetExclusiveScissorEnableNV = PFN_vkCmdSetExclusiveScissorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorEnableNV" ) );
  1060. vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
  1061. //=== VK_NV_device_diagnostic_checkpoints ===
  1062. vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
  1063. vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
  1064. //=== VK_KHR_timeline_semaphore ===
  1065. vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
  1066. if ( !vkGetSemaphoreCounterValue )
  1067. vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
  1068. vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
  1069. if ( !vkWaitSemaphores )
  1070. vkWaitSemaphores = vkWaitSemaphoresKHR;
  1071. vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
  1072. if ( !vkSignalSemaphore )
  1073. vkSignalSemaphore = vkSignalSemaphoreKHR;
  1074. //=== VK_INTEL_performance_query ===
  1075. vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
  1076. vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
  1077. vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
  1078. vkCmdSetPerformanceStreamMarkerINTEL =
  1079. PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
  1080. vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
  1081. vkAcquirePerformanceConfigurationINTEL =
  1082. PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
  1083. vkReleasePerformanceConfigurationINTEL =
  1084. PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
  1085. vkQueueSetPerformanceConfigurationINTEL =
  1086. PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
  1087. vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
  1088. //=== VK_AMD_display_native_hdr ===
  1089. vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
  1090. //=== VK_KHR_fragment_shading_rate ===
  1091. vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
  1092. //=== VK_EXT_buffer_device_address ===
  1093. vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
  1094. if ( !vkGetBufferDeviceAddress )
  1095. vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
  1096. //=== VK_KHR_present_wait ===
  1097. vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
  1098. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  1099. //=== VK_EXT_full_screen_exclusive ===
  1100. vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
  1101. vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
  1102. vkGetDeviceGroupSurfacePresentModes2EXT =
  1103. PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
  1104. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  1105. //=== VK_KHR_buffer_device_address ===
  1106. vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
  1107. if ( !vkGetBufferDeviceAddress )
  1108. vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
  1109. vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
  1110. if ( !vkGetBufferOpaqueCaptureAddress )
  1111. vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
  1112. vkGetDeviceMemoryOpaqueCaptureAddressKHR =
  1113. PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
  1114. if ( !vkGetDeviceMemoryOpaqueCaptureAddress )
  1115. vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
  1116. //=== VK_EXT_line_rasterization ===
  1117. vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
  1118. //=== VK_EXT_host_query_reset ===
  1119. vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
  1120. if ( !vkResetQueryPool )
  1121. vkResetQueryPool = vkResetQueryPoolEXT;
  1122. //=== VK_EXT_extended_dynamic_state ===
  1123. vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
  1124. if ( !vkCmdSetCullMode )
  1125. vkCmdSetCullMode = vkCmdSetCullModeEXT;
  1126. vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
  1127. if ( !vkCmdSetFrontFace )
  1128. vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
  1129. vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
  1130. if ( !vkCmdSetPrimitiveTopology )
  1131. vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
  1132. vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
  1133. if ( !vkCmdSetViewportWithCount )
  1134. vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
  1135. vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
  1136. if ( !vkCmdSetScissorWithCount )
  1137. vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
  1138. vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
  1139. if ( !vkCmdBindVertexBuffers2 )
  1140. vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
  1141. vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
  1142. if ( !vkCmdSetDepthTestEnable )
  1143. vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
  1144. vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
  1145. if ( !vkCmdSetDepthWriteEnable )
  1146. vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
  1147. vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
  1148. if ( !vkCmdSetDepthCompareOp )
  1149. vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
  1150. vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
  1151. if ( !vkCmdSetDepthBoundsTestEnable )
  1152. vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
  1153. vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
  1154. if ( !vkCmdSetStencilTestEnable )
  1155. vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
  1156. vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
  1157. if ( !vkCmdSetStencilOp )
  1158. vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
  1159. //=== VK_KHR_deferred_host_operations ===
  1160. vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
  1161. vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
  1162. vkGetDeferredOperationMaxConcurrencyKHR =
  1163. PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
  1164. vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
  1165. vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
  1166. //=== VK_KHR_pipeline_executable_properties ===
  1167. vkGetPipelineExecutablePropertiesKHR =
  1168. PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
  1169. vkGetPipelineExecutableStatisticsKHR =
  1170. PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
  1171. vkGetPipelineExecutableInternalRepresentationsKHR =
  1172. PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
  1173. //=== VK_EXT_host_image_copy ===
  1174. vkCopyMemoryToImageEXT = PFN_vkCopyMemoryToImageEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToImageEXT" ) );
  1175. vkCopyImageToMemoryEXT = PFN_vkCopyImageToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyImageToMemoryEXT" ) );
  1176. vkCopyImageToImageEXT = PFN_vkCopyImageToImageEXT( vkGetDeviceProcAddr( device, "vkCopyImageToImageEXT" ) );
  1177. vkTransitionImageLayoutEXT = PFN_vkTransitionImageLayoutEXT( vkGetDeviceProcAddr( device, "vkTransitionImageLayoutEXT" ) );
  1178. vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
  1179. if ( !vkGetImageSubresourceLayout2KHR )
  1180. vkGetImageSubresourceLayout2KHR = vkGetImageSubresourceLayout2EXT;
  1181. //=== VK_KHR_map_memory2 ===
  1182. vkMapMemory2KHR = PFN_vkMapMemory2KHR( vkGetDeviceProcAddr( device, "vkMapMemory2KHR" ) );
  1183. vkUnmapMemory2KHR = PFN_vkUnmapMemory2KHR( vkGetDeviceProcAddr( device, "vkUnmapMemory2KHR" ) );
  1184. //=== VK_EXT_swapchain_maintenance1 ===
  1185. vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) );
  1186. //=== VK_NV_device_generated_commands ===
  1187. vkGetGeneratedCommandsMemoryRequirementsNV =
  1188. PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
  1189. vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
  1190. vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
  1191. vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
  1192. vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
  1193. vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
  1194. //=== VK_EXT_depth_bias_control ===
  1195. vkCmdSetDepthBias2EXT = PFN_vkCmdSetDepthBias2EXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias2EXT" ) );
  1196. //=== VK_EXT_private_data ===
  1197. vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
  1198. if ( !vkCreatePrivateDataSlot )
  1199. vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
  1200. vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
  1201. if ( !vkDestroyPrivateDataSlot )
  1202. vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
  1203. vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
  1204. if ( !vkSetPrivateData )
  1205. vkSetPrivateData = vkSetPrivateDataEXT;
  1206. vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
  1207. if ( !vkGetPrivateData )
  1208. vkGetPrivateData = vkGetPrivateDataEXT;
  1209. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  1210. //=== VK_KHR_video_encode_queue ===
  1211. vkGetEncodedVideoSessionParametersKHR =
  1212. PFN_vkGetEncodedVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkGetEncodedVideoSessionParametersKHR" ) );
  1213. vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
  1214. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  1215. # if defined( VK_USE_PLATFORM_METAL_EXT )
  1216. //=== VK_EXT_metal_objects ===
  1217. vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
  1218. # endif /*VK_USE_PLATFORM_METAL_EXT*/
  1219. //=== VK_KHR_synchronization2 ===
  1220. vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
  1221. if ( !vkCmdSetEvent2 )
  1222. vkCmdSetEvent2 = vkCmdSetEvent2KHR;
  1223. vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
  1224. if ( !vkCmdResetEvent2 )
  1225. vkCmdResetEvent2 = vkCmdResetEvent2KHR;
  1226. vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
  1227. if ( !vkCmdWaitEvents2 )
  1228. vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
  1229. vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
  1230. if ( !vkCmdPipelineBarrier2 )
  1231. vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
  1232. vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
  1233. if ( !vkCmdWriteTimestamp2 )
  1234. vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
  1235. vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
  1236. if ( !vkQueueSubmit2 )
  1237. vkQueueSubmit2 = vkQueueSubmit2KHR;
  1238. vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
  1239. vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
  1240. //=== VK_EXT_descriptor_buffer ===
  1241. vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) );
  1242. vkGetDescriptorSetLayoutBindingOffsetEXT =
  1243. PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
  1244. vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) );
  1245. vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) );
  1246. vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
  1247. vkCmdBindDescriptorBufferEmbeddedSamplersEXT =
  1248. PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
  1249. vkGetBufferOpaqueCaptureDescriptorDataEXT =
  1250. PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
  1251. vkGetImageOpaqueCaptureDescriptorDataEXT =
  1252. PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
  1253. vkGetImageViewOpaqueCaptureDescriptorDataEXT =
  1254. PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
  1255. vkGetSamplerOpaqueCaptureDescriptorDataEXT =
  1256. PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
  1257. vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  1258. vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
  1259. //=== VK_NV_fragment_shading_rate_enums ===
  1260. vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
  1261. //=== VK_EXT_mesh_shader ===
  1262. vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
  1263. vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
  1264. vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
  1265. //=== VK_KHR_copy_commands2 ===
  1266. vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
  1267. if ( !vkCmdCopyBuffer2 )
  1268. vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
  1269. vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
  1270. if ( !vkCmdCopyImage2 )
  1271. vkCmdCopyImage2 = vkCmdCopyImage2KHR;
  1272. vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
  1273. if ( !vkCmdCopyBufferToImage2 )
  1274. vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
  1275. vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
  1276. if ( !vkCmdCopyImageToBuffer2 )
  1277. vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
  1278. vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
  1279. if ( !vkCmdBlitImage2 )
  1280. vkCmdBlitImage2 = vkCmdBlitImage2KHR;
  1281. vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
  1282. if ( !vkCmdResolveImage2 )
  1283. vkCmdResolveImage2 = vkCmdResolveImage2KHR;
  1284. //=== VK_EXT_device_fault ===
  1285. vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
  1286. //=== VK_EXT_vertex_input_dynamic_state ===
  1287. vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
  1288. # if defined( VK_USE_PLATFORM_FUCHSIA )
  1289. //=== VK_FUCHSIA_external_memory ===
  1290. vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
  1291. vkGetMemoryZirconHandlePropertiesFUCHSIA =
  1292. PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
  1293. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  1294. # if defined( VK_USE_PLATFORM_FUCHSIA )
  1295. //=== VK_FUCHSIA_external_semaphore ===
  1296. vkImportSemaphoreZirconHandleFUCHSIA =
  1297. PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
  1298. vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
  1299. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  1300. # if defined( VK_USE_PLATFORM_FUCHSIA )
  1301. //=== VK_FUCHSIA_buffer_collection ===
  1302. vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
  1303. vkSetBufferCollectionImageConstraintsFUCHSIA =
  1304. PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
  1305. vkSetBufferCollectionBufferConstraintsFUCHSIA =
  1306. PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
  1307. vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
  1308. vkGetBufferCollectionPropertiesFUCHSIA =
  1309. PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
  1310. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  1311. //=== VK_HUAWEI_subpass_shading ===
  1312. vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI =
  1313. PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
  1314. vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
  1315. //=== VK_HUAWEI_invocation_mask ===
  1316. vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
  1317. //=== VK_NV_external_memory_rdma ===
  1318. vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
  1319. //=== VK_EXT_pipeline_properties ===
  1320. vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
  1321. //=== VK_EXT_extended_dynamic_state2 ===
  1322. vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
  1323. vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
  1324. if ( !vkCmdSetRasterizerDiscardEnable )
  1325. vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
  1326. vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
  1327. if ( !vkCmdSetDepthBiasEnable )
  1328. vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
  1329. vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
  1330. vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
  1331. if ( !vkCmdSetPrimitiveRestartEnable )
  1332. vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
  1333. //=== VK_EXT_color_write_enable ===
  1334. vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
  1335. //=== VK_KHR_ray_tracing_maintenance1 ===
  1336. vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
  1337. //=== VK_EXT_multi_draw ===
  1338. vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
  1339. vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
  1340. //=== VK_EXT_opacity_micromap ===
  1341. vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
  1342. vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
  1343. vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
  1344. vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
  1345. vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
  1346. vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
  1347. vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
  1348. vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
  1349. vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
  1350. vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
  1351. vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
  1352. vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
  1353. vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
  1354. vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
  1355. //=== VK_HUAWEI_cluster_culling_shader ===
  1356. vkCmdDrawClusterHUAWEI = PFN_vkCmdDrawClusterHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterHUAWEI" ) );
  1357. vkCmdDrawClusterIndirectHUAWEI = PFN_vkCmdDrawClusterIndirectHUAWEI( vkGetDeviceProcAddr( device, "vkCmdDrawClusterIndirectHUAWEI" ) );
  1358. //=== VK_EXT_pageable_device_local_memory ===
  1359. vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
  1360. //=== VK_KHR_maintenance4 ===
  1361. vkGetDeviceBufferMemoryRequirementsKHR =
  1362. PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
  1363. if ( !vkGetDeviceBufferMemoryRequirements )
  1364. vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
  1365. vkGetDeviceImageMemoryRequirementsKHR =
  1366. PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
  1367. if ( !vkGetDeviceImageMemoryRequirements )
  1368. vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
  1369. vkGetDeviceImageSparseMemoryRequirementsKHR =
  1370. PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
  1371. if ( !vkGetDeviceImageSparseMemoryRequirements )
  1372. vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
  1373. //=== VK_VALVE_descriptor_set_host_mapping ===
  1374. vkGetDescriptorSetLayoutHostMappingInfoVALVE =
  1375. PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
  1376. vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
  1377. //=== VK_NV_copy_memory_indirect ===
  1378. vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
  1379. vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
  1380. //=== VK_NV_memory_decompression ===
  1381. vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
  1382. vkCmdDecompressMemoryIndirectCountNV =
  1383. PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
  1384. //=== VK_NV_device_generated_commands_compute ===
  1385. vkGetPipelineIndirectMemoryRequirementsNV =
  1386. PFN_vkGetPipelineIndirectMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectMemoryRequirementsNV" ) );
  1387. vkCmdUpdatePipelineIndirectBufferNV = PFN_vkCmdUpdatePipelineIndirectBufferNV( vkGetDeviceProcAddr( device, "vkCmdUpdatePipelineIndirectBufferNV" ) );
  1388. vkGetPipelineIndirectDeviceAddressNV =
  1389. PFN_vkGetPipelineIndirectDeviceAddressNV( vkGetDeviceProcAddr( device, "vkGetPipelineIndirectDeviceAddressNV" ) );
  1390. //=== VK_EXT_extended_dynamic_state3 ===
  1391. vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
  1392. vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
  1393. vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
  1394. vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
  1395. vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
  1396. vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
  1397. vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
  1398. vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
  1399. vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
  1400. vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
  1401. vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
  1402. vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
  1403. vkCmdSetConservativeRasterizationModeEXT =
  1404. PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
  1405. vkCmdSetExtraPrimitiveOverestimationSizeEXT =
  1406. PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
  1407. vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
  1408. vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
  1409. vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
  1410. vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
  1411. vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
  1412. vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
  1413. vkCmdSetDepthClipNegativeOneToOneEXT =
  1414. PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
  1415. vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
  1416. vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
  1417. vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
  1418. vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
  1419. vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
  1420. vkCmdSetCoverageModulationTableEnableNV =
  1421. PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
  1422. vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
  1423. vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
  1424. vkCmdSetRepresentativeFragmentTestEnableNV =
  1425. PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
  1426. vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
  1427. //=== VK_EXT_shader_module_identifier ===
  1428. vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
  1429. vkGetShaderModuleCreateInfoIdentifierEXT =
  1430. PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
  1431. //=== VK_NV_optical_flow ===
  1432. vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
  1433. vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
  1434. vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
  1435. vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
  1436. //=== VK_KHR_maintenance5 ===
  1437. vkCmdBindIndexBuffer2KHR = PFN_vkCmdBindIndexBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer2KHR" ) );
  1438. vkGetRenderingAreaGranularityKHR = PFN_vkGetRenderingAreaGranularityKHR( vkGetDeviceProcAddr( device, "vkGetRenderingAreaGranularityKHR" ) );
  1439. vkGetDeviceImageSubresourceLayoutKHR =
  1440. PFN_vkGetDeviceImageSubresourceLayoutKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSubresourceLayoutKHR" ) );
  1441. vkGetImageSubresourceLayout2KHR = PFN_vkGetImageSubresourceLayout2KHR( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2KHR" ) );
  1442. //=== VK_EXT_shader_object ===
  1443. vkCreateShadersEXT = PFN_vkCreateShadersEXT( vkGetDeviceProcAddr( device, "vkCreateShadersEXT" ) );
  1444. vkDestroyShaderEXT = PFN_vkDestroyShaderEXT( vkGetDeviceProcAddr( device, "vkDestroyShaderEXT" ) );
  1445. vkGetShaderBinaryDataEXT = PFN_vkGetShaderBinaryDataEXT( vkGetDeviceProcAddr( device, "vkGetShaderBinaryDataEXT" ) );
  1446. vkCmdBindShadersEXT = PFN_vkCmdBindShadersEXT( vkGetDeviceProcAddr( device, "vkCmdBindShadersEXT" ) );
  1447. //=== VK_QCOM_tile_properties ===
  1448. vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
  1449. vkGetDynamicRenderingTilePropertiesQCOM =
  1450. PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
  1451. //=== VK_NV_low_latency2 ===
  1452. vkSetLatencySleepModeNV = PFN_vkSetLatencySleepModeNV( vkGetDeviceProcAddr( device, "vkSetLatencySleepModeNV" ) );
  1453. vkLatencySleepNV = PFN_vkLatencySleepNV( vkGetDeviceProcAddr( device, "vkLatencySleepNV" ) );
  1454. vkSetLatencyMarkerNV = PFN_vkSetLatencyMarkerNV( vkGetDeviceProcAddr( device, "vkSetLatencyMarkerNV" ) );
  1455. vkGetLatencyTimingsNV = PFN_vkGetLatencyTimingsNV( vkGetDeviceProcAddr( device, "vkGetLatencyTimingsNV" ) );
  1456. vkQueueNotifyOutOfBandNV = PFN_vkQueueNotifyOutOfBandNV( vkGetDeviceProcAddr( device, "vkQueueNotifyOutOfBandNV" ) );
  1457. //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
  1458. vkCmdSetAttachmentFeedbackLoopEnableEXT =
  1459. PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAttachmentFeedbackLoopEnableEXT" ) );
  1460. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  1461. //=== VK_QNX_external_memory_screen_buffer ===
  1462. vkGetScreenBufferPropertiesQNX = PFN_vkGetScreenBufferPropertiesQNX( vkGetDeviceProcAddr( device, "vkGetScreenBufferPropertiesQNX" ) );
  1463. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  1464. }
  1465. public:
  1466. //=== VK_VERSION_1_0 ===
  1467. PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
  1468. PFN_vkDestroyDevice vkDestroyDevice = 0;
  1469. PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
  1470. PFN_vkQueueSubmit vkQueueSubmit = 0;
  1471. PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
  1472. PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
  1473. PFN_vkAllocateMemory vkAllocateMemory = 0;
  1474. PFN_vkFreeMemory vkFreeMemory = 0;
  1475. PFN_vkMapMemory vkMapMemory = 0;
  1476. PFN_vkUnmapMemory vkUnmapMemory = 0;
  1477. PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
  1478. PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
  1479. PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
  1480. PFN_vkBindBufferMemory vkBindBufferMemory = 0;
  1481. PFN_vkBindImageMemory vkBindImageMemory = 0;
  1482. PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
  1483. PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
  1484. PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
  1485. PFN_vkQueueBindSparse vkQueueBindSparse = 0;
  1486. PFN_vkCreateFence vkCreateFence = 0;
  1487. PFN_vkDestroyFence vkDestroyFence = 0;
  1488. PFN_vkResetFences vkResetFences = 0;
  1489. PFN_vkGetFenceStatus vkGetFenceStatus = 0;
  1490. PFN_vkWaitForFences vkWaitForFences = 0;
  1491. PFN_vkCreateSemaphore vkCreateSemaphore = 0;
  1492. PFN_vkDestroySemaphore vkDestroySemaphore = 0;
  1493. PFN_vkCreateEvent vkCreateEvent = 0;
  1494. PFN_vkDestroyEvent vkDestroyEvent = 0;
  1495. PFN_vkGetEventStatus vkGetEventStatus = 0;
  1496. PFN_vkSetEvent vkSetEvent = 0;
  1497. PFN_vkResetEvent vkResetEvent = 0;
  1498. PFN_vkCreateQueryPool vkCreateQueryPool = 0;
  1499. PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
  1500. PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
  1501. PFN_vkCreateBuffer vkCreateBuffer = 0;
  1502. PFN_vkDestroyBuffer vkDestroyBuffer = 0;
  1503. PFN_vkCreateBufferView vkCreateBufferView = 0;
  1504. PFN_vkDestroyBufferView vkDestroyBufferView = 0;
  1505. PFN_vkCreateImage vkCreateImage = 0;
  1506. PFN_vkDestroyImage vkDestroyImage = 0;
  1507. PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
  1508. PFN_vkCreateImageView vkCreateImageView = 0;
  1509. PFN_vkDestroyImageView vkDestroyImageView = 0;
  1510. PFN_vkCreateShaderModule vkCreateShaderModule = 0;
  1511. PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
  1512. PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
  1513. PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
  1514. PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
  1515. PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
  1516. PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
  1517. PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
  1518. PFN_vkDestroyPipeline vkDestroyPipeline = 0;
  1519. PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
  1520. PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
  1521. PFN_vkCreateSampler vkCreateSampler = 0;
  1522. PFN_vkDestroySampler vkDestroySampler = 0;
  1523. PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
  1524. PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
  1525. PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
  1526. PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
  1527. PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
  1528. PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
  1529. PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
PFN_vkCreateRenderPass vkCreateRenderPass = 0;
PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
PFN_vkCreateCommandPool vkCreateCommandPool = 0;
PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
PFN_vkResetCommandPool vkResetCommandPool = 0;
PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
PFN_vkCmdSetViewport vkCmdSetViewport = 0;
PFN_vkCmdSetScissor vkCmdSetScissor = 0;
PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
PFN_vkCmdDraw vkCmdDraw = 0;
PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
PFN_vkCmdDispatch vkCmdDispatch = 0;
PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
PFN_vkCmdCopyImage vkCmdCopyImage = 0;
PFN_vkCmdBlitImage vkCmdBlitImage = 0;
PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
PFN_vkCmdResolveImage vkCmdResolveImage = 0;
PFN_vkCmdSetEvent vkCmdSetEvent = 0;
PFN_vkCmdResetEvent vkCmdResetEvent = 0;
PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
PFN_vkCmdEndQuery vkCmdEndQuery = 0;
PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
PFN_vkCmdPushConstants vkCmdPushConstants = 0;
PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
//=== VK_VERSION_1_1 ===
PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
PFN_vkTrimCommandPool vkTrimCommandPool = 0;
PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
//=== VK_VERSION_1_2 ===
PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
PFN_vkResetQueryPool vkResetQueryPool = 0;
PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
PFN_vkWaitSemaphores vkWaitSemaphores = 0;
PFN_vkSignalSemaphore vkSignalSemaphore = 0;
PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
//=== VK_VERSION_1_3 ===
PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
PFN_vkSetPrivateData vkSetPrivateData = 0;
PFN_vkGetPrivateData vkGetPrivateData = 0;
PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
PFN_vkCmdEndRendering vkCmdEndRendering = 0;
PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
//=== VK_KHR_swapchain ===
PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
//=== VK_KHR_display_swapchain ===
PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
//=== VK_EXT_debug_marker ===
PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
//=== VK_KHR_video_queue ===
PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
//=== VK_KHR_video_decode_queue ===
PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
//=== VK_EXT_transform_feedback ===
PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
//=== VK_NVX_binary_import ===
PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0;
PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0;
PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0;
PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0;
PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0;
//=== VK_NVX_image_view_handle ===
PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
//=== VK_AMD_draw_indirect_count ===
PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
//=== VK_AMD_shader_info ===
PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
//=== VK_KHR_dynamic_rendering ===
PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0;
PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_external_memory_win32 ===
PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
# else
PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_device_group ===
PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
//=== VK_KHR_maintenance1 ===
PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
# else
PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
# else
PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
//=== VK_KHR_push_descriptor ===
PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
//=== VK_EXT_conditional_rendering ===
PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
//=== VK_KHR_descriptor_update_template ===
PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
//=== VK_NV_clip_space_w_scaling ===
PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
//=== VK_EXT_display_control ===
PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
//=== VK_GOOGLE_display_timing ===
PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
//=== VK_EXT_discard_rectangles ===
PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT = 0;
PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT = 0;
//=== VK_EXT_hdr_metadata ===
PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
//=== VK_KHR_create_renderpass2 ===
PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
//=== VK_KHR_shared_presentable_image ===
PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
# else
PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0;
PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
//=== VK_KHR_performance_query ===
PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
//=== VK_EXT_debug_utils ===
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
# else
PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0;
PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX = 0;
PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX = 0;
PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX = 0;
PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX = 0;
PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX = 0;
PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX = 0;
PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX = 0;
# else
PFN_dummy vkCreateExecutionGraphPipelinesAMDX_placeholder = 0;
PFN_dummy vkGetExecutionGraphPipelineScratchSizeAMDX_placeholder = 0;
PFN_dummy vkGetExecutionGraphPipelineNodeIndexAMDX_placeholder = 0;
PFN_dummy vkCmdInitializeGraphScratchMemoryAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphIndirectAMDX_placeholder = 0;
PFN_dummy vkCmdDispatchGraphIndirectCountAMDX_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_sample_locations ===
PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
//=== VK_KHR_get_memory_requirements2 ===
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
//=== VK_KHR_acceleration_structure ===
PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
//=== VK_KHR_ray_tracing_pipeline ===
PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
//=== VK_KHR_sampler_ycbcr_conversion ===
PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
//=== VK_KHR_bind_memory2 ===
PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
//=== VK_EXT_image_drm_format_modifier ===
PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
//=== VK_EXT_validation_cache ===
PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
//=== VK_NV_shading_rate_image ===
PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
//=== VK_NV_ray_tracing ===
PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
//=== VK_KHR_maintenance3 ===
PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
//=== VK_KHR_draw_indirect_count ===
PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
//=== VK_EXT_external_memory_host ===
PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
//=== VK_AMD_buffer_marker ===
PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
//=== VK_EXT_calibrated_timestamps ===
PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
//=== VK_NV_mesh_shader ===
PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
//=== VK_NV_scissor_exclusive ===
PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV = 0;
PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
//=== VK_NV_device_diagnostic_checkpoints ===
PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
//=== VK_KHR_timeline_semaphore ===
PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
//=== VK_INTEL_performance_query ===
PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
//=== VK_AMD_display_native_hdr ===
PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
//=== VK_KHR_fragment_shading_rate ===
PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
//=== VK_EXT_buffer_device_address ===
PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
//=== VK_KHR_present_wait ===
PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
# else
PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_buffer_device_address ===
PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
//=== VK_EXT_line_rasterization ===
PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
//=== VK_EXT_host_query_reset ===
PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
//=== VK_EXT_extended_dynamic_state ===
PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
//=== VK_KHR_deferred_host_operations ===
PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
//=== VK_KHR_pipeline_executable_properties ===
PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
//=== VK_EXT_host_image_copy ===
PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT = 0;
PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT = 0;
PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT = 0;
PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT = 0;
PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
//=== VK_KHR_map_memory2 ===
PFN_vkMapMemory2KHR vkMapMemory2KHR = 0;
PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR = 0;
//=== VK_EXT_swapchain_maintenance1 ===
PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
//=== VK_NV_device_generated_commands ===
PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
//=== VK_EXT_depth_bias_control ===
PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT = 0;
//=== VK_EXT_private_data ===
PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR = 0;
PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
# else
PFN_dummy vkGetEncodedVideoSessionParametersKHR_placeholder = 0;
PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_objects ===
PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
# else
PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_KHR_synchronization2 ===
PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
//=== VK_EXT_descriptor_buffer ===
PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
//=== VK_NV_fragment_shading_rate_enums ===
PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
//=== VK_EXT_mesh_shader ===
PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
//=== VK_KHR_copy_commands2 ===
PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
//=== VK_EXT_device_fault ===
PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
//=== VK_EXT_vertex_input_dynamic_state ===
PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_memory ===
PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
# else
PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_external_semaphore ===
PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
# else
PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
# else
PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_HUAWEI_subpass_shading ===
PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
//=== VK_HUAWEI_invocation_mask ===
PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
//=== VK_NV_external_memory_rdma ===
PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
//=== VK_EXT_pipeline_properties ===
PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
//=== VK_EXT_extended_dynamic_state2 ===
PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
//=== VK_EXT_color_write_enable ===
PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
//=== VK_KHR_ray_tracing_maintenance1 ===
PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
//=== VK_EXT_multi_draw ===
PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
//=== VK_EXT_opacity_micromap ===
PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
//=== VK_HUAWEI_cluster_culling_shader ===
PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI = 0;
PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI = 0;
//=== VK_EXT_pageable_device_local_memory ===
PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
//=== VK_KHR_maintenance4 ===
PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
//=== VK_VALVE_descriptor_set_host_mapping ===
PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
//=== VK_NV_copy_memory_indirect ===
PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
//=== VK_NV_memory_decompression ===
PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
//=== VK_NV_device_generated_commands_compute ===
PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV = 0;
PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV = 0;
PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV = 0;
//=== VK_EXT_extended_dynamic_state3 ===
PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
//=== VK_EXT_shader_module_identifier ===
PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
//=== VK_NV_optical_flow ===
PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
//=== VK_KHR_maintenance5 ===
PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR = 0;
PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR = 0;
PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR = 0;
PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR = 0;
//=== VK_EXT_shader_object ===
PFN_vkCreateShadersEXT vkCreateShadersEXT = 0;
PFN_vkDestroyShaderEXT vkDestroyShaderEXT = 0;
PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT = 0;
PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT = 0;
//=== VK_QCOM_tile_properties ===
PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
//=== VK_NV_low_latency2 ===
PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV = 0;
PFN_vkLatencySleepNV vkLatencySleepNV = 0;
PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV = 0;
PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV = 0;
PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV = 0;
//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT = 0;
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_external_memory_screen_buffer ===
PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX = 0;
# else
PFN_dummy vkGetScreenBufferPropertiesQNX_placeholder = 0;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
};
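// Editorial note (sketch, not part of the generated header): every member above is a raw,
// per-device Vulkan entry point, kept at 0 until it is resolved against a concrete VkDevice.
// A table like this is normally filled with vkGetDeviceProcAddr; the 'init' function below is
// an illustrative assumption, not the loader this header actually generates, and shows only a
// few of the many lookups.
//
//   void init( VkDevice device, PFN_vkGetDeviceProcAddr getDeviceProcAddr )
//   {
//     vkQueueSubmit2   = reinterpret_cast<PFN_vkQueueSubmit2>( getDeviceProcAddr( device, "vkQueueSubmit2" ) );
//     vkCmdCopyBuffer2 = reinterpret_cast<PFN_vkCmdCopyBuffer2>( getDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
//     // ...one such assignment per member; entry points of extensions that were not enabled
//     //    on the device simply stay 0, which is why callers check the pointer (or the
//     //    enabled-extension list) before use.
//   }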
//========================================
//=== RAII HANDLE forward declarations ===
//========================================
//=== VK_VERSION_1_0 ===
class Instance;
class PhysicalDevice;
class Device;
class Queue;
class DeviceMemory;
class Fence;
class Semaphore;
class Event;
class QueryPool;
class Buffer;
class BufferView;
class Image;
class ImageView;
class ShaderModule;
class PipelineCache;
class Pipeline;
class PipelineLayout;
class Sampler;
class DescriptorPool;
class DescriptorSet;
class DescriptorSetLayout;
class Framebuffer;
class RenderPass;
class CommandPool;
class CommandBuffer;
//=== VK_VERSION_1_1 ===
class SamplerYcbcrConversion;
class DescriptorUpdateTemplate;
//=== VK_VERSION_1_3 ===
class PrivateDataSlot;
//=== VK_KHR_surface ===
class SurfaceKHR;
//=== VK_KHR_swapchain ===
class SwapchainKHR;
//=== VK_KHR_display ===
class DisplayKHR;
class DisplayModeKHR;
//=== VK_EXT_debug_report ===
class DebugReportCallbackEXT;
//=== VK_KHR_video_queue ===
class VideoSessionKHR;
class VideoSessionParametersKHR;
//=== VK_NVX_binary_import ===
class CuModuleNVX;
class CuFunctionNVX;
//=== VK_EXT_debug_utils ===
class DebugUtilsMessengerEXT;
//=== VK_KHR_acceleration_structure ===
class AccelerationStructureKHR;
//=== VK_EXT_validation_cache ===
class ValidationCacheEXT;
//=== VK_NV_ray_tracing ===
class AccelerationStructureNV;
//=== VK_INTEL_performance_query ===
class PerformanceConfigurationINTEL;
//=== VK_KHR_deferred_host_operations ===
class DeferredOperationKHR;
//=== VK_NV_device_generated_commands ===
class IndirectCommandsLayoutNV;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_buffer_collection ===
class BufferCollectionFUCHSIA;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
//=== VK_EXT_opacity_micromap ===
class MicromapEXT;
//=== VK_NV_optical_flow ===
class OpticalFlowSessionNV;
//=== VK_EXT_shader_object ===
class ShaderEXT;
//====================
//=== RAII HANDLES ===
//====================
class Context
{
public:
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
Context()
: m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher(
m_dynamicLoader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" ) ) )
# else
Context( PFN_vkGetInstanceProcAddr getInstanceProcAddr )
: m_dispatcher( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher( getInstanceProcAddr ) )
# endif
{
}
~Context() = default;
Context( Context const & ) = delete;
Context( Context && rhs ) VULKAN_HPP_NOEXCEPT
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
: m_dynamicLoader( std::move( rhs.m_dynamicLoader ) )
, m_dispatcher( rhs.m_dispatcher.release() )
# else
: m_dispatcher( rhs.m_dispatcher.release() )
# endif
{
}
Context & operator=( Context const & ) = delete;
Context & operator=( Context && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
m_dynamicLoader = std::move( rhs.m_dynamicLoader );
# endif
m_dispatcher.reset( rhs.m_dispatcher.release() );
}
return *this;
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context & rhs )
{
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
std::swap( m_dynamicLoader, rhs.m_dynamicLoader );
# endif
m_dispatcher.swap( rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Instance
createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateInstanceLayerProperties() const;
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD uint32_t enumerateInstanceVersion() const;
private:
# if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
VULKAN_HPP_NAMESPACE::DynamicLoader m_dynamicLoader;
# endif
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher> m_dispatcher;
};
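// Usage sketch (editorial comment, not generated code): creating a Context and an Instance
// with the RAII wrappers declared in this header. It assumes VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
// is enabled (so the default Context constructor exists); the application/engine names and the
// API version are placeholder assumptions.
//
//   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context context;  // loads vkGetInstanceProcAddr via the dynamic loader
//   VULKAN_HPP_NAMESPACE::ApplicationInfo appInfo( "demo", 1, "demo-engine", 1, VK_API_VERSION_1_3 );
//   VULKAN_HPP_NAMESPACE::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
//   VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance instance = context.createInstance( instanceCreateInfo );
//   // the underlying VkInstance is destroyed automatically when 'instance' goes out of scope.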
  2344. class Instance
  2345. {
  2346. public:
  2347. using CType = VkInstance;
  2348. using CppType = vk::Instance;
  2349. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
  2350. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  2351. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
  2352. public:
  2353. Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context,
  2354. VULKAN_HPP_NAMESPACE::InstanceCreateInfo const & createInfo,
  2355. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  2356. : m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  2357. {
  2358. VULKAN_HPP_NAMESPACE::Result result =
  2359. static_cast<VULKAN_HPP_NAMESPACE::Result>( context.getDispatcher()->vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
  2360. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  2361. reinterpret_cast<VkInstance *>( &m_instance ) ) );
  2362. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  2363. {
  2364. detail::throwResultException( result, "vkCreateInstance" );
  2365. }
  2366. m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
  2367. static_cast<VkInstance>( m_instance ) ) );
  2368. }
  2369. Instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Context const & context,
  2370. VkInstance instance,
  2371. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  2372. : m_instance( instance ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  2373. {
  2374. m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher( context.getDispatcher()->vkGetInstanceProcAddr,
  2375. static_cast<VkInstance>( m_instance ) ) );
  2376. }
  2377. Instance( std::nullptr_t ) {}
  2378. ~Instance()
  2379. {
  2380. clear();
  2381. }
  2382. Instance() = delete;
  2383. Instance( Instance const & ) = delete;
  2384. Instance( Instance && rhs ) VULKAN_HPP_NOEXCEPT
  2385. : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
  2386. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  2387. , m_dispatcher( rhs.m_dispatcher.release() )
  2388. {
  2389. }
  2390. Instance & operator=( Instance const & ) = delete;
  2391. Instance & operator =( Instance && rhs ) VULKAN_HPP_NOEXCEPT
  2392. {
  2393. if ( this != &rhs )
  2394. {
  2395. std::swap( m_instance, rhs.m_instance );
  2396. std::swap( m_allocator, rhs.m_allocator );
  2397. std::swap( m_dispatcher, rhs.m_dispatcher );
  2398. }
  2399. return *this;
  2400. }
  2401. VULKAN_HPP_NAMESPACE::Instance const & operator*() const VULKAN_HPP_NOEXCEPT
  2402. {
  2403. return m_instance;
  2404. }
  2405. void clear() VULKAN_HPP_NOEXCEPT
  2406. {
  2407. if ( m_instance )
  2408. {
  2409. getDispatcher()->vkDestroyInstance( static_cast<VkInstance>( m_instance ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  2410. }
  2411. m_instance = nullptr;
  2412. m_allocator = nullptr;
  2413. m_dispatcher = nullptr;
  2414. }
  2415. VULKAN_HPP_NAMESPACE::Instance release()
  2416. {
  2417. m_allocator = nullptr;
  2418. m_dispatcher = nullptr;
  2419. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_instance, nullptr );
  2420. }
  2421. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  2422. {
  2423. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  2424. return &*m_dispatcher;
  2425. }
  2426. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance & rhs ) VULKAN_HPP_NOEXCEPT
  2427. {
  2428. std::swap( m_instance, rhs.m_instance );
  2429. std::swap( m_allocator, rhs.m_allocator );
  2430. std::swap( m_dispatcher, rhs.m_dispatcher );
  2431. }
  2432. //=== VK_VERSION_1_0 ===
  2433. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> enumeratePhysicalDevices() const;
  2434. VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
  2435. //=== VK_VERSION_1_1 ===
  2436. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroups() const;
  2437. //=== VK_KHR_display ===
  2438. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2439. createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
  2440. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  2441. # if defined( VK_USE_PLATFORM_XLIB_KHR )
  2442. //=== VK_KHR_xlib_surface ===
  2443. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2444. createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
  2445. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  2446. # endif /*VK_USE_PLATFORM_XLIB_KHR*/
  2447. # if defined( VK_USE_PLATFORM_XCB_KHR )
  2448. //=== VK_KHR_xcb_surface ===
  2449. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2450. createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
  2451. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  2452. # endif /*VK_USE_PLATFORM_XCB_KHR*/
  2453. # if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  2454. //=== VK_KHR_wayland_surface ===
  2455. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2456. createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
  2457. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  2458. # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  2459. # if defined( VK_USE_PLATFORM_ANDROID_KHR )
  2460. //=== VK_KHR_android_surface ===
  2461. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2462. createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
  2463. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  2464. # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  2465. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  2466. //=== VK_KHR_win32_surface ===
  2467. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  2468. createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_debug_report ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT
createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
uint64_t object,
size_t location,
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_GGP )
//=== VK_GGP_stream_descriptor_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_GGP*/
# if defined( VK_USE_PLATFORM_VI_NN )
//=== VK_NN_vi_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_VI_NN*/
//=== VK_KHR_device_group_creation ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> enumeratePhysicalDeviceGroupsKHR() const;
# if defined( VK_USE_PLATFORM_IOS_MVK )
//=== VK_MVK_ios_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_IOS_MVK*/
# if defined( VK_USE_PLATFORM_MACOS_MVK )
//=== VK_MVK_macos_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_MACOS_MVK*/
//=== VK_EXT_debug_utils ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT
createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_FUCHSIA )
//=== VK_FUCHSIA_imagepipe_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_FUCHSIA*/
# if defined( VK_USE_PLATFORM_METAL_EXT )
//=== VK_EXT_metal_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_METAL_EXT*/
//=== VK_EXT_headless_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
private:
VULKAN_HPP_NAMESPACE::Instance m_instance = {};
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher> m_dispatcher;
};
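// Usage sketch (editor's addition, not part of the generated header): creating a debug
// messenger through the RAII Instance wrapper declared above, assuming the default namespace
// aliases (VULKAN_HPP_NAMESPACE == vk, VULKAN_HPP_RAII_NAMESPACE == vk::raii) and a
// hypothetical, already-filled create-info:
//
//   vk::DebugUtilsMessengerCreateInfoEXT messengerCreateInfo = makeMessengerCreateInfo();  // placeholder helper
//   vk::raii::DebugUtilsMessengerEXT     messenger = instance.createDebugUtilsMessengerEXT( messengerCreateInfo );
//   // the messenger is destroyed automatically when the RAII wrapper goes out of scope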
class PhysicalDevice
{
public:
using CType = VkPhysicalDevice;
using CppType = vk::PhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
public:
PhysicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance, VkPhysicalDevice physicalDevice )
: m_physicalDevice( physicalDevice ), m_dispatcher( instance.getDispatcher() )
{
}
PhysicalDevice( std::nullptr_t ) {}
~PhysicalDevice()
{
clear();
}
PhysicalDevice() = delete;
PhysicalDevice( PhysicalDevice const & rhs ) : m_physicalDevice( rhs.m_physicalDevice ), m_dispatcher( rhs.m_dispatcher ) {}
PhysicalDevice( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
: m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
, m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
{
}
PhysicalDevice & operator=( PhysicalDevice const & rhs )
{
m_physicalDevice = rhs.m_physicalDevice;
m_dispatcher = rhs.m_dispatcher;
return *this;
}
PhysicalDevice & operator=( PhysicalDevice && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::PhysicalDevice const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_physicalDevice;
}
void clear() VULKAN_HPP_NOEXCEPT
{
m_physicalDevice = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::PhysicalDevice release()
{
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_physicalDevice, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_physicalDevice, rhs.m_physicalDevice );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties
getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> getQueueFamilyProperties() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Device
createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> enumerateDeviceLayerProperties() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const;
//=== VK_VERSION_1_1 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolProperties() const;
//=== VK_KHR_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
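// Usage sketch (editor's addition): typical swapchain-setup queries through the
// VK_KHR_surface entry points above, against a vk::raii::PhysicalDevice and an existing
// vk::raii::SurfaceKHR named `surface` (both names are assumptions); the wrapped surface is
// passed by dereferencing the RAII object:
//
//   vk::Bool32                        supported    = physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, *surface );
//   vk::SurfaceCapabilitiesKHR        capabilities = physicalDevice.getSurfaceCapabilitiesKHR( *surface );
//   std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( *surface );
//   std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( *surface );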
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Rect2D> getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_display ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> getDisplayPropertiesKHR() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> getDisplayPlanePropertiesKHR() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR> getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const;
# if defined( VK_USE_PLATFORM_XLIB_KHR )
//=== VK_KHR_xlib_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XLIB_KHR*/
# if defined( VK_USE_PLATFORM_XCB_KHR )
//=== VK_KHR_xcb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32
getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_XCB_KHR*/
# if defined( VK_USE_PLATFORM_WAYLAND_KHR )
//=== VK_KHR_wayland_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
struct wl_display & display ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_win32_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const;
//=== VK_NV_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV getExternalImageFormatPropertiesNV(
VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
//=== VK_KHR_get_physical_device_properties2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getFeatures2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getProperties2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageFormatProperties2
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> getQueueFamilyProperties2KHR() const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getQueueFamilyProperties2KHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const;
//=== VK_KHR_external_memory_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties
getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_external_semaphore_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
//=== VK_EXT_acquire_xlib_display ===
void acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput ) const;
# endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
//=== VK_EXT_display_surface_counter ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
//=== VK_KHR_external_fence_capabilities ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties
getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_performance_query ===
VULKAN_HPP_NODISCARD
std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const;
VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR(
const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_get_surface_capabilities2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
template <typename StructureChain>
VULKAN_HPP_NODISCARD std::vector<StructureChain> getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
//=== VK_KHR_get_display_properties2 ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> getDisplayProperties2KHR() const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> getDisplayPlaneProperties2KHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const;
//=== VK_EXT_sample_locations ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_calibrated_timestamps ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> getCalibrateableTimeDomainsEXT() const;
//=== VK_KHR_fragment_shading_rate ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> getFragmentShadingRatesKHR() const;
//=== VK_EXT_tooling_info ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> getToolPropertiesEXT() const;
//=== VK_NV_cooperative_matrix ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> getCooperativeMatrixPropertiesNV() const;
//=== VK_NV_coverage_reduction_mode ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> getSupportedFramebufferMixedSamplesCombinationsNV() const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_EXT_full_screen_exclusive ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_EXT_acquire_drm_display ===
void acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const;
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_KHR_video_encode_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR
getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_NV_acquire_winrt_display ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayKHR getWinrtDisplayNV( uint32_t deviceRelativeId ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
# if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
//=== VK_EXT_directfb_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
# if defined( VK_USE_PLATFORM_SCREEN_QNX )
//=== VK_QNX_screen_surface ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Bool32 getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_USE_PLATFORM_SCREEN_QNX*/
//=== VK_NV_optical_flow ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const;
//=== VK_KHR_cooperative_matrix ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> getCooperativeMatrixPropertiesKHR() const;
private:
VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
};
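// Usage sketch (editor's addition): basic queries on a vk::raii::PhysicalDevice, assuming
// the default vk / vk::raii namespace aliases; the variable names are placeholders:
//
//   vk::PhysicalDeviceProperties           properties    = physicalDevice.getProperties();
//   vk::PhysicalDeviceFeatures             features      = physicalDevice.getFeatures();
//   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();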
class PhysicalDevices : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice>
{
public:
PhysicalDevices( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance )
{
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = instance.getDispatcher();
std::vector<VkPhysicalDevice> physicalDevices;
uint32_t physicalDeviceCount;
VULKAN_HPP_NAMESPACE::Result result;
do
{
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, nullptr ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && physicalDeviceCount )
{
physicalDevices.resize( physicalDeviceCount );
result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
dispatcher->vkEnumeratePhysicalDevices( static_cast<VkInstance>( *instance ), &physicalDeviceCount, physicalDevices.data() ) );
}
} while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
this->reserve( physicalDeviceCount );
for ( auto const & physicalDevice : physicalDevices )
{
this->emplace_back( instance, physicalDevice );
}
}
else
{
detail::throwResultException( result, "vkEnumeratePhysicalDevices" );
}
}
PhysicalDevices( std::nullptr_t ) {}
PhysicalDevices() = delete;
PhysicalDevices( PhysicalDevices const & ) = delete;
PhysicalDevices( PhysicalDevices && rhs ) = default;
PhysicalDevices & operator=( PhysicalDevices const & ) = delete;
PhysicalDevices & operator=( PhysicalDevices && rhs ) = default;
};
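// Usage sketch (editor's addition): the PhysicalDevices helper above wraps
// vkEnumeratePhysicalDevices, so enumeration and selection reduce to:
//
//   vk::raii::PhysicalDevices physicalDevices( instance );              // throws if enumeration fails
//   vk::raii::PhysicalDevice  physicalDevice = physicalDevices.front(); // selection policy is up to the application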
class Device
{
public:
using CType = VkDevice;
using CppType = vk::Device;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
public:
Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
physicalDevice.getDispatcher()->vkCreateDevice( static_cast<VkPhysicalDevice>( *physicalDevice ),
reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
reinterpret_cast<VkDevice *>( &m_device ) ) );
if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
{
detail::throwResultException( result, "vkCreateDevice" );
}
m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
static_cast<VkDevice>( m_device ) ) );
}
Device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice,
VkDevice device,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
: m_device( device ), m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
{
m_dispatcher.reset( new VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher( physicalDevice.getDispatcher()->vkGetDeviceProcAddr,
static_cast<VkDevice>( m_device ) ) );
}
Device( std::nullptr_t ) {}
~Device()
{
clear();
}
Device() = delete;
Device( Device const & ) = delete;
Device( Device && rhs ) VULKAN_HPP_NOEXCEPT
: m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
, m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
, m_dispatcher( rhs.m_dispatcher.release() )
{
}
Device & operator=( Device const & ) = delete;
Device & operator=( Device && rhs ) VULKAN_HPP_NOEXCEPT
{
if ( this != &rhs )
{
std::swap( m_device, rhs.m_device );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
return *this;
}
VULKAN_HPP_NAMESPACE::Device const & operator*() const VULKAN_HPP_NOEXCEPT
{
return m_device;
}
void clear() VULKAN_HPP_NOEXCEPT
{
if ( m_device )
{
getDispatcher()->vkDestroyDevice( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
}
m_device = nullptr;
m_allocator = nullptr;
m_dispatcher = nullptr;
}
VULKAN_HPP_NAMESPACE::Device release()
{
m_allocator = nullptr;
m_dispatcher = nullptr;
return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_device, nullptr );
}
VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
{
VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
return &*m_dispatcher;
}
void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device & rhs ) VULKAN_HPP_NOEXCEPT
{
std::swap( m_device, rhs.m_device );
std::swap( m_allocator, rhs.m_allocator );
std::swap( m_dispatcher, rhs.m_dispatcher );
}
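// Usage sketch (editor's addition): constructing a vk::raii::Device via the constructor
// above and fetching a queue with the core-1.0 member declared below; queueFamilyIndex and
// the create-info contents are placeholders:
//
//   float                     queuePriority = 1.0f;
//   vk::DeviceQueueCreateInfo queueCreateInfo( {}, queueFamilyIndex, 1, &queuePriority );
//   vk::DeviceCreateInfo      deviceCreateInfo( {}, queueCreateInfo );
//   vk::raii::Device          device( physicalDevice, deviceCreateInfo );
//   vk::raii::Queue           queue = device.getQueue( queueFamilyIndex, 0 );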
//=== VK_VERSION_1_0 ===
VULKAN_HPP_NODISCARD PFN_vkVoidFunction getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const;
void waitIdle() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeviceMemory
allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
void invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
VULKAN_HPP_NAMESPACE::Bool32 waitAll,
uint64_t timeout ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Semaphore
createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Event
createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::QueryPool
createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Buffer
createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferView
createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Image
createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ImageView
createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderModule
createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineCache
createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
createGraphicsPipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
createGraphicsPipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
createComputePipelines( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PipelineLayout
createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Sampler
createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout
createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorPool
createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const;
void updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const
VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Framebuffer
createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CommandPool
createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const;
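// Usage sketch (editor's addition): the core-1.0 factory functions above return RAII
// wrappers directly, e.g. a command pool plus primary command buffers; queueFamilyIndex is
// a placeholder:
//
//   vk::raii::CommandPool commandPool =
//     device.createCommandPool( { vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex } );
//   vk::CommandBufferAllocateInfo        allocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 );
//   std::vector<vk::raii::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocateInfo );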
//=== VK_VERSION_1_1 ===
void bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
void bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Queue getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
void signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD uint64_t
getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_VERSION_1_3 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
uint64_t data ) const;
VULKAN_HPP_NODISCARD uint64_t getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
uint64_t objectHandle,
VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR getGroupPresentCapabilitiesKHR() const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const;
VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const;
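// Usage sketch (editor's addition): creating a swapchain through the wrapper above;
// swapchainCreateInfo is assumed to be filled from the surface queries shown earlier, and
// the resulting vk::raii::SwapchainKHR destroys the swapchain when it goes out of scope:
//
//   vk::raii::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainCreateInfo );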
//=== VK_KHR_display_swapchain ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_debug_marker ===
void debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const;
void debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const;
//=== VK_KHR_video_queue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_NVX_binary_import ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_NVX_image_view_handle ===
VULKAN_HPP_NODISCARD uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT;
//=== VK_KHR_device_group ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_memory_win32 ===
VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_memory_fd ===
VULKAN_HPP_NODISCARD int getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_semaphore_win32 ===
void importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD HANDLE getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_semaphore_fd ===
void importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const;
VULKAN_HPP_NODISCARD int getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const;
//=== VK_KHR_descriptor_update_template ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_display_control ===
void displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Fence
registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
//=== VK_EXT_hdr_metadata ===
void setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const;
//=== VK_KHR_create_renderpass2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::RenderPass
createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# if defined( VK_USE_PLATFORM_WIN32_KHR )
//=== VK_KHR_external_fence_win32 ===
void importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const;
VULKAN_HPP_NODISCARD HANDLE getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const;
# endif /*VK_USE_PLATFORM_WIN32_KHR*/
//=== VK_KHR_external_fence_fd ===
void importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const;
VULKAN_HPP_NODISCARD int getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const;
//=== VK_KHR_performance_query ===
void acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const;
void releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT;
//=== VK_EXT_debug_utils ===
void setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const;
void setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const;
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const;
VULKAN_HPP_NODISCARD struct AHardwareBuffer *
getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const;
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createExecutionGraphPipelinesAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createExecutionGraphPipelineAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const;
//=== VK_KHR_acceleration_structure ===
VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR
createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD std::vector<DataType> writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const;
template <typename DataType>
VULKAN_HPP_NODISCARD DataType writeAccelerationStructuresPropertyKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT;
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR(
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  3272. //=== VK_KHR_ray_tracing_pipeline ===
  3273. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> createRayTracingPipelinesKHR(
  3274. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
  3275. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  3276. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  3277. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3278. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline createRayTracingPipelineKHR(
  3279. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
  3280. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  3281. VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
  3282. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3283. //=== VK_KHR_sampler_ycbcr_conversion ===
  3284. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
  3285. createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
  3286. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3287. void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
  3288. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
  3289. VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  3290. //=== VK_KHR_bind_memory2 ===
  3291. void bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const;
  3292. void bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const;
  3293. //=== VK_EXT_validation_cache ===
  3294. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT
  3295. createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
  3296. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3297. //=== VK_NV_ray_tracing ===
  3298. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV
  3299. createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
  3300. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3301. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV(
  3302. const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
  3303. template <typename X, typename Y, typename... Z>
  3304. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV(
  3305. const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
  3306. void bindAccelerationStructureMemoryNV(
  3307. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const;
  3308. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline>
  3309. createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  3310. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  3311. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3312. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::Pipeline
  3313. createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  3314. VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
  3315. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3316. //=== VK_KHR_maintenance3 ===
  3317. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
  3318. getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
  3319. template <typename X, typename Y, typename... Z>
  3320. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3321. getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
  3322. //=== VK_EXT_external_memory_host ===
  3323. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
  3324. getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const;
  3325. //=== VK_EXT_calibrated_timestamps ===
  3326. VULKAN_HPP_NODISCARD std::pair<std::vector<uint64_t>, uint64_t>
  3327. getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const;
  3328. VULKAN_HPP_NODISCARD std::pair<uint64_t, uint64_t>
  3329. getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const;
  3330. //=== VK_KHR_timeline_semaphore ===
  3331. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout ) const;
  3332. void signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const;
  3333. //=== VK_INTEL_performance_query ===
  3334. void initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const;
  3335. void uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT;
  3336. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL
  3337. acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const;
  3338. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
  3339. getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const;
  3340. //=== VK_EXT_buffer_device_address ===
  3341. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
  3342. getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
  3343. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  3344. //=== VK_EXT_full_screen_exclusive ===
  3345. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
  3346. getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const;
  3347. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  3348. //=== VK_KHR_buffer_device_address ===
  3349. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
  3350. getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
  3351. VULKAN_HPP_NODISCARD uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
  3352. VULKAN_HPP_NODISCARD uint64_t
  3353. getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT;
  3354. //=== VK_KHR_deferred_host_operations ===
  3355. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR
  3356. createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3357. //=== VK_KHR_pipeline_executable_properties ===
  3358. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
  3359. getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const;
  3360. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
  3361. getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
  3362. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
  3363. getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const;
  3364. //=== VK_EXT_host_image_copy ===
  3365. void copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const;
  3366. void copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const;
  3367. void copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const;
  3368. void transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const;
  3369. //=== VK_KHR_map_memory2 ===
  3370. VULKAN_HPP_NODISCARD void * mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const;
  3371. void unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT;
  3372. //=== VK_EXT_swapchain_maintenance1 ===
  3373. void releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const;
  3374. //=== VK_NV_device_generated_commands ===
  3375. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
  3376. getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
  3377. template <typename X, typename Y, typename... Z>
  3378. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3379. getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
  3380. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV
  3381. createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
  3382. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3383. //=== VK_EXT_private_data ===
  3384. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
  3385. createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
  3386. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3387. void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
  3388. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator
  3389. VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  3390. void setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  3391. uint64_t objectHandle,
  3392. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  3393. uint64_t data ) const;
  3394. VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  3395. uint64_t objectHandle,
  3396. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT;
  3397. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  3398. //=== VK_KHR_video_encode_queue ===
  3399. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>>
  3400. getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
  3401. template <typename X, typename Y, typename... Z>
  3402. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>>
  3403. getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const;
  3404. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  3405. # if defined( VK_USE_PLATFORM_METAL_EXT )
  3406. //=== VK_EXT_metal_objects ===
  3407. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
  3408. template <typename X, typename Y, typename... Z>
  3409. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT;
  3410. # endif /*VK_USE_PLATFORM_METAL_EXT*/
  3411. //=== VK_EXT_descriptor_buffer ===
  3412. void getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT;
  3413. template <typename DescriptorType>
  3414. VULKAN_HPP_NODISCARD DescriptorType getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT;
  3415. template <typename DataType>
  3416. VULKAN_HPP_NODISCARD DataType getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const;
  3417. template <typename DataType>
  3418. VULKAN_HPP_NODISCARD DataType getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const;
  3419. template <typename DataType>
  3420. VULKAN_HPP_NODISCARD DataType
  3421. getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const;
  3422. template <typename DataType>
  3423. VULKAN_HPP_NODISCARD DataType getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const;
  3424. template <typename DataType>
  3425. VULKAN_HPP_NODISCARD DataType
  3426. getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const;
  3427. //=== VK_EXT_device_fault ===
  3428. VULKAN_HPP_NODISCARD
  3429. std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
  3430. getFaultInfoEXT() const;
  3431. # if defined( VK_USE_PLATFORM_FUCHSIA )
  3432. //=== VK_FUCHSIA_external_memory ===
  3433. VULKAN_HPP_NODISCARD zx_handle_t getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
  3434. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
  3435. getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const;
  3436. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  3437. # if defined( VK_USE_PLATFORM_FUCHSIA )
  3438. //=== VK_FUCHSIA_external_semaphore ===
  3439. void importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const;
  3440. VULKAN_HPP_NODISCARD zx_handle_t
  3441. getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const;
  3442. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  3443. # if defined( VK_USE_PLATFORM_FUCHSIA )
  3444. //=== VK_FUCHSIA_buffer_collection ===
  3445. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA
  3446. createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
  3447. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3448. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  3449. //=== VK_NV_external_memory_rdma ===
  3450. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RemoteAddressNV
  3451. getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const;
  3452. //=== VK_EXT_pipeline_properties ===
  3453. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BaseOutStructure getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const;
  3454. //=== VK_EXT_opacity_micromap ===
  3455. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
  3456. createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
  3457. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3458. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result
  3459. buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  3460. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const;
  3461. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  3462. const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const;
  3463. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  3464. const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const;
  3465. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  3466. const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const;
  3467. template <typename DataType>
  3468. VULKAN_HPP_NODISCARD std::vector<DataType>
  3469. writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  3470. VULKAN_HPP_NAMESPACE::QueryType queryType,
  3471. size_t dataSize,
  3472. size_t stride ) const;
  3473. template <typename DataType>
  3474. VULKAN_HPP_NODISCARD DataType writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  3475. VULKAN_HPP_NAMESPACE::QueryType queryType,
  3476. size_t stride ) const;
  3477. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
  3478. getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT;
  3479. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
  3480. getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  3481. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT;
  3482. //=== VK_KHR_maintenance4 ===
  3483. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
  3484. getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
  3485. template <typename X, typename Y, typename... Z>
  3486. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3487. getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
  3488. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
  3489. getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
  3490. template <typename X, typename Y, typename... Z>
  3491. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3492. getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT;
  3493. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
  3494. getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const;
  3495. //=== VK_VALVE_descriptor_set_host_mapping ===
  3496. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE getDescriptorSetLayoutHostMappingInfoVALVE(
  3497. const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT;
  3498. //=== VK_NV_device_generated_commands_compute ===
  3499. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2
  3500. getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
  3501. template <typename X, typename Y, typename... Z>
  3502. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3503. getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
  3504. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceAddress
  3505. getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT;
  3506. //=== VK_EXT_shader_module_identifier ===
  3507. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
  3508. getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT;
  3509. //=== VK_NV_optical_flow ===
  3510. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
  3511. createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
  3512. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3513. //=== VK_KHR_maintenance5 ===
  3514. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D
  3515. getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT;
  3516. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
  3517. getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
  3518. template <typename X, typename Y, typename... Z>
  3519. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  3520. getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
  3521. //=== VK_EXT_shader_object ===
  3522. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
  3523. createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
  3524. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3525. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
  3526. createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
  3527. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  3528. //=== VK_QCOM_tile_properties ===
  3529. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
  3530. getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
  3531. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  3532. //=== VK_QNX_external_memory_screen_buffer ===
  3533. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const;
  3534. template <typename X, typename Y, typename... Z>
  3535. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const;
  3536. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  3537. private:
  3538. VULKAN_HPP_NAMESPACE::Device m_device = {};
  3539. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  3540. std::unique_ptr<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher> m_dispatcher;
  3541. };
  3542. class AccelerationStructureKHR
  3543. {
  3544. public:
  3545. using CType = VkAccelerationStructureKHR;
  3546. using CppType = vk::AccelerationStructureKHR;
  3547. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
  3548. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  3549. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
  3550. public:
  3551. AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3552. VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
  3553. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3554. : m_device( *device )
  3555. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3556. , m_dispatcher( device.getDispatcher() )
  3557. {
  3558. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  3559. device.getDispatcher()->vkCreateAccelerationStructureKHR( static_cast<VkDevice>( *device ),
  3560. reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
  3561. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  3562. reinterpret_cast<VkAccelerationStructureKHR *>( &m_accelerationStructure ) ) );
  3563. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  3564. {
  3565. detail::throwResultException( result, "vkCreateAccelerationStructureKHR" );
  3566. }
  3567. }
  3568. AccelerationStructureKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3569. VkAccelerationStructureKHR accelerationStructure,
  3570. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3571. : m_device( *device )
  3572. , m_accelerationStructure( accelerationStructure )
  3573. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3574. , m_dispatcher( device.getDispatcher() )
  3575. {
  3576. }
  3577. AccelerationStructureKHR( std::nullptr_t ) {}
  3578. ~AccelerationStructureKHR()
  3579. {
  3580. clear();
  3581. }
  3582. AccelerationStructureKHR() = delete;
  3583. AccelerationStructureKHR( AccelerationStructureKHR const & ) = delete;
  3584. AccelerationStructureKHR( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
  3585. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  3586. , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
  3587. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  3588. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  3589. {
  3590. }
  3591. AccelerationStructureKHR & operator=( AccelerationStructureKHR const & ) = delete;
  3592. AccelerationStructureKHR & operator =( AccelerationStructureKHR && rhs ) VULKAN_HPP_NOEXCEPT
  3593. {
  3594. if ( this != &rhs )
  3595. {
  3596. std::swap( m_device, rhs.m_device );
  3597. std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
  3598. std::swap( m_allocator, rhs.m_allocator );
  3599. std::swap( m_dispatcher, rhs.m_dispatcher );
  3600. }
  3601. return *this;
  3602. }
  3603. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  3604. {
  3605. return m_accelerationStructure;
  3606. }
  3607. void clear() VULKAN_HPP_NOEXCEPT
  3608. {
  3609. if ( m_accelerationStructure )
  3610. {
  3611. getDispatcher()->vkDestroyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
  3612. static_cast<VkAccelerationStructureKHR>( m_accelerationStructure ),
  3613. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  3614. }
  3615. m_device = nullptr;
  3616. m_accelerationStructure = nullptr;
  3617. m_allocator = nullptr;
  3618. m_dispatcher = nullptr;
  3619. }
  3620. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR release()
  3621. {
  3622. m_device = nullptr;
  3623. m_allocator = nullptr;
  3624. m_dispatcher = nullptr;
  3625. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr );
  3626. }
  3627. VULKAN_HPP_NAMESPACE::Device getDevice() const
  3628. {
  3629. return m_device;
  3630. }
  3631. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  3632. {
  3633. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  3634. return m_dispatcher;
  3635. }
  3636. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR & rhs ) VULKAN_HPP_NOEXCEPT
  3637. {
  3638. std::swap( m_device, rhs.m_device );
  3639. std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
  3640. std::swap( m_allocator, rhs.m_allocator );
  3641. std::swap( m_dispatcher, rhs.m_dispatcher );
  3642. }
  3643. private:
  3644. VULKAN_HPP_NAMESPACE::Device m_device = {};
  3645. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR m_accelerationStructure = {};
  3646. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  3647. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  3648. };
  3649. class AccelerationStructureNV
  3650. {
  3651. public:
  3652. using CType = VkAccelerationStructureNV;
  3653. using CppType = vk::AccelerationStructureNV;
  3654. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
  3655. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  3656. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
  3657. public:
  3658. AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3659. VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
  3660. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3661. : m_device( *device )
  3662. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3663. , m_dispatcher( device.getDispatcher() )
  3664. {
  3665. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  3666. device.getDispatcher()->vkCreateAccelerationStructureNV( static_cast<VkDevice>( *device ),
  3667. reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
  3668. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  3669. reinterpret_cast<VkAccelerationStructureNV *>( &m_accelerationStructure ) ) );
  3670. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  3671. {
  3672. detail::throwResultException( result, "vkCreateAccelerationStructureNV" );
  3673. }
  3674. }
  3675. AccelerationStructureNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3676. VkAccelerationStructureNV accelerationStructure,
  3677. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3678. : m_device( *device )
  3679. , m_accelerationStructure( accelerationStructure )
  3680. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3681. , m_dispatcher( device.getDispatcher() )
  3682. {
  3683. }
  3684. AccelerationStructureNV( std::nullptr_t ) {}
  3685. ~AccelerationStructureNV()
  3686. {
  3687. clear();
  3688. }
  3689. AccelerationStructureNV() = delete;
  3690. AccelerationStructureNV( AccelerationStructureNV const & ) = delete;
  3691. AccelerationStructureNV( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
  3692. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  3693. , m_accelerationStructure( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_accelerationStructure, {} ) )
  3694. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  3695. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  3696. {
  3697. }
  3698. AccelerationStructureNV & operator=( AccelerationStructureNV const & ) = delete;
  3699. AccelerationStructureNV & operator =( AccelerationStructureNV && rhs ) VULKAN_HPP_NOEXCEPT
  3700. {
  3701. if ( this != &rhs )
  3702. {
  3703. std::swap( m_device, rhs.m_device );
  3704. std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
  3705. std::swap( m_allocator, rhs.m_allocator );
  3706. std::swap( m_dispatcher, rhs.m_dispatcher );
  3707. }
  3708. return *this;
  3709. }
  3710. VULKAN_HPP_NAMESPACE::AccelerationStructureNV const & operator*() const VULKAN_HPP_NOEXCEPT
  3711. {
  3712. return m_accelerationStructure;
  3713. }
  3714. void clear() VULKAN_HPP_NOEXCEPT
  3715. {
  3716. if ( m_accelerationStructure )
  3717. {
  3718. getDispatcher()->vkDestroyAccelerationStructureNV( static_cast<VkDevice>( m_device ),
  3719. static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
  3720. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  3721. }
  3722. m_device = nullptr;
  3723. m_accelerationStructure = nullptr;
  3724. m_allocator = nullptr;
  3725. m_dispatcher = nullptr;
  3726. }
  3727. VULKAN_HPP_NAMESPACE::AccelerationStructureNV release()
  3728. {
  3729. m_device = nullptr;
  3730. m_allocator = nullptr;
  3731. m_dispatcher = nullptr;
  3732. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_accelerationStructure, nullptr );
  3733. }
  3734. VULKAN_HPP_NAMESPACE::Device getDevice() const
  3735. {
  3736. return m_device;
  3737. }
  3738. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  3739. {
  3740. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  3741. return m_dispatcher;
  3742. }
  3743. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV & rhs ) VULKAN_HPP_NOEXCEPT
  3744. {
  3745. std::swap( m_device, rhs.m_device );
  3746. std::swap( m_accelerationStructure, rhs.m_accelerationStructure );
  3747. std::swap( m_allocator, rhs.m_allocator );
  3748. std::swap( m_dispatcher, rhs.m_dispatcher );
  3749. }
  3750. //=== VK_NV_ray_tracing ===
  3751. template <typename DataType>
  3752. VULKAN_HPP_NODISCARD std::vector<DataType> getHandle( size_t dataSize ) const;
  3753. template <typename DataType>
  3754. VULKAN_HPP_NODISCARD DataType getHandle() const;
  3755. private:
  3756. VULKAN_HPP_NAMESPACE::Device m_device = {};
  3757. VULKAN_HPP_NAMESPACE::AccelerationStructureNV m_accelerationStructure = {};
  3758. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  3759. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  3760. };
  3761. class Buffer
  3762. {
  3763. public:
  3764. using CType = VkBuffer;
  3765. using CppType = vk::Buffer;
  3766. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
  3767. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  3768. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
  3769. public:
  3770. Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3771. VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
  3772. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3773. : m_device( *device )
  3774. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3775. , m_dispatcher( device.getDispatcher() )
  3776. {
  3777. VULKAN_HPP_NAMESPACE::Result result =
  3778. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateBuffer( static_cast<VkDevice>( *device ),
  3779. reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
  3780. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  3781. reinterpret_cast<VkBuffer *>( &m_buffer ) ) );
  3782. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  3783. {
  3784. detail::throwResultException( result, "vkCreateBuffer" );
  3785. }
  3786. }
  3787. Buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3788. VkBuffer buffer,
  3789. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3790. : m_device( *device )
  3791. , m_buffer( buffer )
  3792. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3793. , m_dispatcher( device.getDispatcher() )
  3794. {
  3795. }
  3796. Buffer( std::nullptr_t ) {}
  3797. ~Buffer()
  3798. {
  3799. clear();
  3800. }
  3801. Buffer() = delete;
  3802. Buffer( Buffer const & ) = delete;
  3803. Buffer( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
  3804. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  3805. , m_buffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_buffer, {} ) )
  3806. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  3807. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  3808. {
  3809. }
  3810. Buffer & operator=( Buffer const & ) = delete;
  3811. Buffer & operator =( Buffer && rhs ) VULKAN_HPP_NOEXCEPT
  3812. {
  3813. if ( this != &rhs )
  3814. {
  3815. std::swap( m_device, rhs.m_device );
  3816. std::swap( m_buffer, rhs.m_buffer );
  3817. std::swap( m_allocator, rhs.m_allocator );
  3818. std::swap( m_dispatcher, rhs.m_dispatcher );
  3819. }
  3820. return *this;
  3821. }
  3822. VULKAN_HPP_NAMESPACE::Buffer const & operator*() const VULKAN_HPP_NOEXCEPT
  3823. {
  3824. return m_buffer;
  3825. }
  3826. void clear() VULKAN_HPP_NOEXCEPT
  3827. {
  3828. if ( m_buffer )
  3829. {
  3830. getDispatcher()->vkDestroyBuffer(
  3831. static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  3832. }
  3833. m_device = nullptr;
  3834. m_buffer = nullptr;
  3835. m_allocator = nullptr;
  3836. m_dispatcher = nullptr;
  3837. }
  3838. VULKAN_HPP_NAMESPACE::Buffer release()
  3839. {
  3840. m_device = nullptr;
  3841. m_allocator = nullptr;
  3842. m_dispatcher = nullptr;
  3843. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_buffer, nullptr );
  3844. }
  3845. VULKAN_HPP_NAMESPACE::Device getDevice() const
  3846. {
  3847. return m_device;
  3848. }
  3849. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  3850. {
  3851. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  3852. return m_dispatcher;
  3853. }
  3854. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Buffer & rhs ) VULKAN_HPP_NOEXCEPT
  3855. {
  3856. std::swap( m_device, rhs.m_device );
  3857. std::swap( m_buffer, rhs.m_buffer );
  3858. std::swap( m_allocator, rhs.m_allocator );
  3859. std::swap( m_dispatcher, rhs.m_dispatcher );
  3860. }
  3861. //=== VK_VERSION_1_0 ===
  3862. void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
  3863. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
  3864. private:
  3865. VULKAN_HPP_NAMESPACE::Device m_device = {};
  3866. VULKAN_HPP_NAMESPACE::Buffer m_buffer = {};
  3867. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  3868. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  3869. };
  3870. # if defined( VK_USE_PLATFORM_FUCHSIA )
  3871. class BufferCollectionFUCHSIA
  3872. {
  3873. public:
  3874. using CType = VkBufferCollectionFUCHSIA;
  3875. using CppType = vk::BufferCollectionFUCHSIA;
  3876. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferCollectionFUCHSIA;
  3877. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  3878. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferCollectionFUCHSIA;
  3879. public:
  3880. BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3881. VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
  3882. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3883. : m_device( *device )
  3884. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3885. , m_dispatcher( device.getDispatcher() )
  3886. {
  3887. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  3888. device.getDispatcher()->vkCreateBufferCollectionFUCHSIA( static_cast<VkDevice>( *device ),
  3889. reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
  3890. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  3891. reinterpret_cast<VkBufferCollectionFUCHSIA *>( &m_collection ) ) );
  3892. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  3893. {
  3894. detail::throwResultException( result, "vkCreateBufferCollectionFUCHSIA" );
  3895. }
  3896. }
  3897. BufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3898. VkBufferCollectionFUCHSIA collection,
  3899. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3900. : m_device( *device )
  3901. , m_collection( collection )
  3902. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3903. , m_dispatcher( device.getDispatcher() )
  3904. {
  3905. }
  3906. BufferCollectionFUCHSIA( std::nullptr_t ) {}
  3907. ~BufferCollectionFUCHSIA()
  3908. {
  3909. clear();
  3910. }
  3911. BufferCollectionFUCHSIA() = delete;
  3912. BufferCollectionFUCHSIA( BufferCollectionFUCHSIA const & ) = delete;
  3913. BufferCollectionFUCHSIA( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
  3914. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  3915. , m_collection( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_collection, {} ) )
  3916. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  3917. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  3918. {
  3919. }
  3920. BufferCollectionFUCHSIA & operator=( BufferCollectionFUCHSIA const & ) = delete;
  3921. BufferCollectionFUCHSIA & operator =( BufferCollectionFUCHSIA && rhs ) VULKAN_HPP_NOEXCEPT
  3922. {
  3923. if ( this != &rhs )
  3924. {
  3925. std::swap( m_device, rhs.m_device );
  3926. std::swap( m_collection, rhs.m_collection );
  3927. std::swap( m_allocator, rhs.m_allocator );
  3928. std::swap( m_dispatcher, rhs.m_dispatcher );
  3929. }
  3930. return *this;
  3931. }
  3932. VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA const & operator*() const VULKAN_HPP_NOEXCEPT
  3933. {
  3934. return m_collection;
  3935. }
  3936. void clear() VULKAN_HPP_NOEXCEPT
  3937. {
  3938. if ( m_collection )
  3939. {
  3940. getDispatcher()->vkDestroyBufferCollectionFUCHSIA( static_cast<VkDevice>( m_device ),
  3941. static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
  3942. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  3943. }
  3944. m_device = nullptr;
  3945. m_collection = nullptr;
  3946. m_allocator = nullptr;
  3947. m_dispatcher = nullptr;
  3948. }
  3949. VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA release()
  3950. {
  3951. m_device = nullptr;
  3952. m_allocator = nullptr;
  3953. m_dispatcher = nullptr;
  3954. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_collection, nullptr );
  3955. }
  3956. VULKAN_HPP_NAMESPACE::Device getDevice() const
  3957. {
  3958. return m_device;
  3959. }
  3960. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  3961. {
  3962. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  3963. return m_dispatcher;
  3964. }
  3965. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA & rhs ) VULKAN_HPP_NOEXCEPT
  3966. {
  3967. std::swap( m_device, rhs.m_device );
  3968. std::swap( m_collection, rhs.m_collection );
  3969. std::swap( m_allocator, rhs.m_allocator );
  3970. std::swap( m_dispatcher, rhs.m_dispatcher );
  3971. }
  3972. //=== VK_FUCHSIA_buffer_collection ===
  3973. void setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const;
  3974. void setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const;
  3975. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA getProperties() const;
  3976. private:
  3977. VULKAN_HPP_NAMESPACE::Device m_device = {};
  3978. VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA m_collection = {};
  3979. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  3980. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  3981. };
  3982. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  3983. class BufferView
  3984. {
  3985. public:
  3986. using CType = VkBufferView;
  3987. using CppType = vk::BufferView;
  3988. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
  3989. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  3990. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
  3991. public:
  3992. BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  3993. VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
  3994. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  3995. : m_device( *device )
  3996. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  3997. , m_dispatcher( device.getDispatcher() )
  3998. {
  3999. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  4000. device.getDispatcher()->vkCreateBufferView( static_cast<VkDevice>( *device ),
  4001. reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
  4002. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  4003. reinterpret_cast<VkBufferView *>( &m_bufferView ) ) );
  4004. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  4005. {
  4006. detail::throwResultException( result, "vkCreateBufferView" );
  4007. }
  4008. }
  4009. BufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  4010. VkBufferView bufferView,
  4011. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  4012. : m_device( *device )
  4013. , m_bufferView( bufferView )
  4014. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  4015. , m_dispatcher( device.getDispatcher() )
  4016. {
  4017. }
  4018. BufferView( std::nullptr_t ) {}
  4019. ~BufferView()
  4020. {
  4021. clear();
  4022. }
  4023. BufferView() = delete;
  4024. BufferView( BufferView const & ) = delete;
  4025. BufferView( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
  4026. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  4027. , m_bufferView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_bufferView, {} ) )
  4028. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  4029. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  4030. {
  4031. }
  4032. BufferView & operator=( BufferView const & ) = delete;
  4033. BufferView & operator =( BufferView && rhs ) VULKAN_HPP_NOEXCEPT
  4034. {
  4035. if ( this != &rhs )
  4036. {
  4037. std::swap( m_device, rhs.m_device );
  4038. std::swap( m_bufferView, rhs.m_bufferView );
  4039. std::swap( m_allocator, rhs.m_allocator );
  4040. std::swap( m_dispatcher, rhs.m_dispatcher );
  4041. }
  4042. return *this;
  4043. }
  4044. VULKAN_HPP_NAMESPACE::BufferView const & operator*() const VULKAN_HPP_NOEXCEPT
  4045. {
  4046. return m_bufferView;
  4047. }
  4048. void clear() VULKAN_HPP_NOEXCEPT
  4049. {
  4050. if ( m_bufferView )
  4051. {
  4052. getDispatcher()->vkDestroyBufferView(
  4053. static_cast<VkDevice>( m_device ), static_cast<VkBufferView>( m_bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  4054. }
  4055. m_device = nullptr;
  4056. m_bufferView = nullptr;
  4057. m_allocator = nullptr;
  4058. m_dispatcher = nullptr;
  4059. }
  4060. VULKAN_HPP_NAMESPACE::BufferView release()
  4061. {
  4062. m_device = nullptr;
  4063. m_allocator = nullptr;
  4064. m_dispatcher = nullptr;
  4065. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_bufferView, nullptr );
  4066. }
  4067. VULKAN_HPP_NAMESPACE::Device getDevice() const
  4068. {
  4069. return m_device;
  4070. }
  4071. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  4072. {
  4073. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  4074. return m_dispatcher;
  4075. }
  4076. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::BufferView & rhs ) VULKAN_HPP_NOEXCEPT
  4077. {
  4078. std::swap( m_device, rhs.m_device );
  4079. std::swap( m_bufferView, rhs.m_bufferView );
  4080. std::swap( m_allocator, rhs.m_allocator );
  4081. std::swap( m_dispatcher, rhs.m_dispatcher );
  4082. }
  4083. private:
  4084. VULKAN_HPP_NAMESPACE::Device m_device = {};
  4085. VULKAN_HPP_NAMESPACE::BufferView m_bufferView = {};
  4086. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  4087. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  4088. };
  4089. class CommandPool
  4090. {
  4091. public:
  4092. using CType = VkCommandPool;
  4093. using CppType = vk::CommandPool;
  4094. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
  4095. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  4096. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
  4097. public:
  4098. CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  4099. VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
  4100. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  4101. : m_device( *device )
  4102. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
    , m_dispatcher( device.getDispatcher() )
  {
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
      device.getDispatcher()->vkCreateCommandPool( static_cast<VkDevice>( *device ),
        reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
        reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
        reinterpret_cast<VkCommandPool *>( &m_commandPool ) ) );
    if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      detail::throwResultException( result, "vkCreateCommandPool" );
    }
  }

  CommandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
    VkCommandPool commandPool,
    VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
    : m_device( *device )
    , m_commandPool( commandPool )
    , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
    , m_dispatcher( device.getDispatcher() )
  {
  }

  CommandPool( std::nullptr_t ) {}

  ~CommandPool()
  {
    clear();
  }

  CommandPool() = delete;
  CommandPool( CommandPool const & ) = delete;

  CommandPool( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
    : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
    , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
    , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
    , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  {
  }

  CommandPool & operator=( CommandPool const & ) = delete;

  CommandPool & operator=( CommandPool && rhs ) VULKAN_HPP_NOEXCEPT
  {
    if ( this != &rhs )
    {
      std::swap( m_device, rhs.m_device );
      std::swap( m_commandPool, rhs.m_commandPool );
      std::swap( m_allocator, rhs.m_allocator );
      std::swap( m_dispatcher, rhs.m_dispatcher );
    }
    return *this;
  }

  VULKAN_HPP_NAMESPACE::CommandPool const & operator*() const VULKAN_HPP_NOEXCEPT
  {
    return m_commandPool;
  }

  void clear() VULKAN_HPP_NOEXCEPT
  {
    if ( m_commandPool )
    {
      getDispatcher()->vkDestroyCommandPool(
        static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
    }
    m_device = nullptr;
    m_commandPool = nullptr;
    m_allocator = nullptr;
    m_dispatcher = nullptr;
  }

  VULKAN_HPP_NAMESPACE::CommandPool release()
  {
    m_device = nullptr;
    m_allocator = nullptr;
    m_dispatcher = nullptr;
    return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandPool, nullptr );
  }

  VULKAN_HPP_NAMESPACE::Device getDevice() const
  {
    return m_device;
  }

  VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  {
    VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
    return m_dispatcher;
  }

  void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandPool & rhs ) VULKAN_HPP_NOEXCEPT
  {
    std::swap( m_device, rhs.m_device );
    std::swap( m_commandPool, rhs.m_commandPool );
    std::swap( m_allocator, rhs.m_allocator );
    std::swap( m_dispatcher, rhs.m_dispatcher );
  }

  //=== VK_VERSION_1_0 ===

  void reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;

  //=== VK_VERSION_1_1 ===

  void trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_maintenance1 ===

  void trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

private:
  VULKAN_HPP_NAMESPACE::Device m_device = {};
  VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
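
// Usage sketch: a hedged example of how this wrapper is typically driven, assuming an
// existing vk::raii::Device `device` and a queue family index `queueFamilyIndex` that
// are set up elsewhere (both names are placeholders, not part of this header):
//
//   vk::CommandPoolCreateInfo poolCreateInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex );
//   vk::raii::CommandPool commandPool( device, poolCreateInfo );
//   commandPool.reset();  // VK_VERSION_1_0: recycles every command buffer allocated from the pool
//
// clear() and the destructor call vkDestroyCommandPool; release() instead hands the raw
// VkCommandPool back to the caller without destroying it.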

class CommandBuffer
{
public:
  using CType = VkCommandBuffer;
  using CppType = vk::CommandBuffer;

  static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
  static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
    VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;

public:
  CommandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkCommandBuffer commandBuffer, VkCommandPool commandPool )
    : m_device( *device ), m_commandPool( commandPool ), m_commandBuffer( commandBuffer ), m_dispatcher( device.getDispatcher() )
  {
  }

  CommandBuffer( std::nullptr_t ) {}

  ~CommandBuffer()
  {
    clear();
  }

  CommandBuffer() = delete;
  CommandBuffer( CommandBuffer const & ) = delete;

  CommandBuffer( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
    : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
    , m_commandPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandPool, {} ) )
    , m_commandBuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_commandBuffer, {} ) )
    , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  {
  }

  CommandBuffer & operator=( CommandBuffer const & ) = delete;

  CommandBuffer & operator=( CommandBuffer && rhs ) VULKAN_HPP_NOEXCEPT
  {
    if ( this != &rhs )
    {
      std::swap( m_device, rhs.m_device );
      std::swap( m_commandPool, rhs.m_commandPool );
      std::swap( m_commandBuffer, rhs.m_commandBuffer );
      std::swap( m_dispatcher, rhs.m_dispatcher );
    }
    return *this;
  }

  VULKAN_HPP_NAMESPACE::CommandBuffer const & operator*() const VULKAN_HPP_NOEXCEPT
  {
    return m_commandBuffer;
  }

  void clear() VULKAN_HPP_NOEXCEPT
  {
    if ( m_commandBuffer )
    {
      getDispatcher()->vkFreeCommandBuffers(
        static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), 1, reinterpret_cast<VkCommandBuffer const *>( &m_commandBuffer ) );
    }
    m_device = nullptr;
    m_commandPool = nullptr;
    m_commandBuffer = nullptr;
    m_dispatcher = nullptr;
  }

  VULKAN_HPP_NAMESPACE::CommandBuffer release()
  {
    m_device = nullptr;
    m_commandPool = nullptr;
    m_dispatcher = nullptr;
    return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_commandBuffer, nullptr );
  }

  VULKAN_HPP_NAMESPACE::Device getDevice() const
  {
    return m_device;
  }

  VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  {
    VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
    return m_dispatcher;
  }

  void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer & rhs ) VULKAN_HPP_NOEXCEPT
  {
    std::swap( m_device, rhs.m_device );
    std::swap( m_commandPool, rhs.m_commandPool );
    std::swap( m_commandBuffer, rhs.m_commandBuffer );
    std::swap( m_dispatcher, rhs.m_dispatcher );
  }

  //=== VK_VERSION_1_0 ===

  void begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const;
  void end() const;
  void reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;
  void setViewport( uint32_t firstViewport,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
  void setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
  void setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT;
  void setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT;
  void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT;
  void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT;
  void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT;
  void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t firstSet,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT;
  void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;
  void bindVertexBuffers( uint32_t firstBinding,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
  void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
  void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
  void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    uint32_t drawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    uint32_t drawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
  void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;
  void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
  void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
    VULKAN_HPP_NAMESPACE::Image dstImage,
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
  void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
    VULKAN_HPP_NAMESPACE::Image dstImage,
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
    VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT;
  void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
    VULKAN_HPP_NAMESPACE::Image dstImage,
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
  void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT;
  template <typename DataType>
  void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT;
  void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
    VULKAN_HPP_NAMESPACE::DeviceSize size,
    uint32_t data ) const VULKAN_HPP_NOEXCEPT;
  void clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
    const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
  void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
    VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
    const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT;
  void clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT;
  void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
    VULKAN_HPP_NAMESPACE::Image dstImage,
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT;
  void setEvent( VULKAN_HPP_NAMESPACE::Event event,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void resetEvent( VULKAN_HPP_NAMESPACE::Event event,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT;
  void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
    VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT;
  void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t query,
    VULKAN_HPP_NAMESPACE::QueryControlFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
  void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
  void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t query ) const VULKAN_HPP_NOEXCEPT;
  void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    uint32_t queryCount,
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
    VULKAN_HPP_NAMESPACE::DeviceSize stride,
    VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  template <typename ValuesType>
  void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
    uint32_t offset,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT;
  void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
    VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT;
  void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT;
  void endRenderPass() const VULKAN_HPP_NOEXCEPT;
  void executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_VERSION_1_1 ===

  void setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
  void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_VERSION_1_2 ===

  void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
    const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
  void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
    const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
  void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_VERSION_1_3 ===

  void setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
  void resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
  void pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
  void writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT;
  void copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
  void blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
  void endRendering() const VULKAN_HPP_NOEXCEPT;
  void setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
  void setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
  void setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
  void setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
  void bindVertexBuffers2( uint32_t firstBinding,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  void setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
    VULKAN_HPP_NAMESPACE::StencilOp failOp,
    VULKAN_HPP_NAMESPACE::StencilOp passOp,
    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
    VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT;
  void setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
  void setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_debug_marker ===

  void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;
  void debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT;
  void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_video_queue ===

  void beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT;
  void endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT;
  void controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_video_decode_queue ===

  void decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_transform_feedback ===

  void bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  void beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  void endTransformFeedbackEXT( uint32_t firstCounterBuffer,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t query,
    VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
    uint32_t index ) const VULKAN_HPP_NOEXCEPT;
  void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT;
  void drawIndirectByteCountEXT( uint32_t instanceCount,
    uint32_t firstInstance,
    VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
    uint32_t counterOffset,
    uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NVX_binary_import ===

  void cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_AMD_draw_indirect_count ===

  void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_dynamic_rendering ===

  void beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT;
  void endRenderingKHR() const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_device_group ===

  void setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT;
  void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_push_descriptor ===

  void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t set,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT;
  template <typename DataType>
  void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t set,
    DataType const & data ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_conditional_rendering ===

  void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT;
  void endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_clip_space_w_scaling ===

  void setViewportWScalingNV( uint32_t firstViewport,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_discard_rectangles ===

  void setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT;
  void setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_create_renderpass2 ===

  void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
    const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT;
  void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
    const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;
  void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_debug_utils ===

  void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
  void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
  void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;

# if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_AMDX_shader_enqueue ===

  void initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT;
  void dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
  void dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
    const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT;
  void dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
    VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_EXT_sample_locations ===

  void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_acceleration_structure ===

  void buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const;
  void buildAccelerationStructuresIndirectKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const;
  void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
  void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
  void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT;
  void writeAccelerationStructuresPropertiesKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_ray_tracing_pipeline ===

  void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
    uint32_t width,
    uint32_t height,
    uint32_t depth ) const VULKAN_HPP_NOEXCEPT;
  void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
    const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
    VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;
  void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_shading_rate_image ===

  void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;
  void setViewportShadingRatePaletteNV( uint32_t firstViewport,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT;
  void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_ray_tracing ===

  void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
    VULKAN_HPP_NAMESPACE::Buffer instanceData,
    VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
    VULKAN_HPP_NAMESPACE::Bool32 update,
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
    VULKAN_HPP_NAMESPACE::Buffer scratch,
    VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT;
  void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
    VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT;
  void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
    VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
    VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
    VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
    VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
    VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
    VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
    uint32_t width,
    uint32_t height,
    uint32_t depth ) const VULKAN_HPP_NOEXCEPT;
  void writeAccelerationStructuresPropertiesNV(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_draw_indirect_count ===

  void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_AMD_buffer_marker ===

  void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
    uint32_t marker ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_mesh_shader ===

  void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT;
  void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    uint32_t drawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_scissor_exclusive ===

  void setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT;
  void setExclusiveScissorNV( uint32_t firstExclusiveScissor,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_device_diagnostic_checkpoints ===

  template <typename CheckpointMarkerType>
  void setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_INTEL_performance_query ===

  void setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const;
  void setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const;
  void setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const;

  //=== VK_KHR_fragment_shading_rate ===

  void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_line_rasterization ===

  void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_extended_dynamic_state ===

  void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT;
  void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT;
  void setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT;
  void setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT;
  void bindVertexBuffers2EXT( uint32_t firstBinding,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const;
  void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
    VULKAN_HPP_NAMESPACE::StencilOp failOp,
    VULKAN_HPP_NAMESPACE::StencilOp passOp,
    VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
    VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_device_generated_commands ===

  void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
  void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT;
  void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::Pipeline pipeline,
    uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_depth_bias_control ===

  void setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT;

# if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_encode_queue ===

  void encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT;
# endif /*VK_ENABLE_BETA_EXTENSIONS*/

  //=== VK_KHR_synchronization2 ===

  void setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
  void resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
    VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  void waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const;
  void pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT;
  void writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t query ) const VULKAN_HPP_NOEXCEPT;
  void writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
    VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
    uint32_t marker ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_descriptor_buffer ===

  void bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const VULKAN_HPP_NOEXCEPT;
  void setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t firstSet,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const;
  void bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::PipelineLayout layout,
    uint32_t set ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_fragment_shading_rate_enums ===

  void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
    const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_mesh_shader ===

  void drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
  void drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    uint32_t drawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::Buffer countBuffer,
    VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
    uint32_t maxDrawCount,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_copy_commands2 ===

  void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT;
  void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT;
  void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_vertex_input_dynamic_state ===

  void setVertexInputEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_HUAWEI_subpass_shading ===

  void subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT;

  //=== VK_HUAWEI_invocation_mask ===

  void bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_extended_dynamic_state2 ===

  void setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT;
  void setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT;
  void setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT;
  void setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_color_write_enable ===

  void setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_ray_tracing_maintenance1 ===

  void traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_multi_draw ===

  void drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
    uint32_t instanceCount,
    uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT;
  void drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
    uint32_t instanceCount,
    uint32_t firstInstance,
    Optional<const int32_t> vertexOffset VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_opacity_micromap ===

  void buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT;
  void copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
  void copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
  void copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT;
  void writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_HUAWEI_cluster_culling_shader ===

  void drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT;
  void drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_copy_memory_indirect ===

  void copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT;
  void copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
    uint32_t stride,
    VULKAN_HPP_NAMESPACE::Image dstImage,
    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_memory_decompression ===

  void decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT;
  void decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
    VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
    uint32_t stride ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_device_generated_commands_compute ===

  void updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
    VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_extended_dynamic_state3 ===

  void setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT;
  void setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT;
  void setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT;
  void setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT;
  void setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const;
  void setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT;
  void setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT;
  void setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT;
  void setColorBlendEnableEXT( uint32_t firstAttachment,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT;
  void setColorBlendEquationEXT( uint32_t firstAttachment,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT;
  void setColorWriteMaskEXT( uint32_t firstAttachment,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT;
  void setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT;
  void setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
  void setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT;
  void setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT;
  void setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT;
  void setColorBlendAdvancedEXT( uint32_t firstAttachment,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT;
  void setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT;
  void setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT;
  void setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT;
  void setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT;
  void setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT;
  void setViewportSwizzleNV( uint32_t firstViewport,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT;
  void setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT;
  void setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT;
  void setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_NV_optical_flow ===

  void opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
    const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_KHR_maintenance5 ===

  void bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
    VULKAN_HPP_NAMESPACE::DeviceSize offset,
    VULKAN_HPP_NAMESPACE::DeviceSize size,
    VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT;

  //=== VK_EXT_shader_object ===

  void bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const;

  //=== VK_EXT_attachment_feedback_loop_dynamic_state ===

  void setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;

private:
  VULKAN_HPP_NAMESPACE::Device m_device = {};
  VULKAN_HPP_NAMESPACE::CommandPool m_commandPool = {};
  VULKAN_HPP_NAMESPACE::CommandBuffer m_commandBuffer = {};
  VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
};
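
// Usage sketch: a hedged example of recording with this wrapper, assuming a
// vk::raii::CommandBuffer `commandBuffer` obtained from a pool, plus placeholder
// vk::raii::Buffer objects `srcBuffer`, `dstBuffer` and a `byteCount` to copy
// (none of these names come from this header):
//
//   commandBuffer.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
//   commandBuffer.copyBuffer( *srcBuffer, *dstBuffer, vk::BufferCopy( 0, 0, byteCount ) );
//   commandBuffer.end();
//
// clear() and the destructor call vkFreeCommandBuffers on the single handle, so the
// VkCommandPool the buffer was allocated from must outlive this wrapper.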

class CommandBuffers : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
{
public:
  CommandBuffers( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
    VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo )
  {
    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
    std::vector<VkCommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateCommandBuffers(
      static_cast<VkDevice>( *device ), reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), commandBuffers.data() ) );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      this->reserve( allocateInfo.commandBufferCount );
      for ( auto const & commandBuffer : commandBuffers )
      {
        this->emplace_back( device, commandBuffer, static_cast<VkCommandPool>( allocateInfo.commandPool ) );
      }
    }
    else
    {
      detail::throwResultException( result, "vkAllocateCommandBuffers" );
    }
  }

  CommandBuffers( std::nullptr_t ) {}

  CommandBuffers() = delete;
  CommandBuffers( CommandBuffers const & ) = delete;
  CommandBuffers( CommandBuffers && rhs ) = default;
  CommandBuffers & operator=( CommandBuffers const & ) = delete;
  CommandBuffers & operator=( CommandBuffers && rhs ) = default;
};
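
// Usage sketch: CommandBuffers allocates a whole batch with vkAllocateCommandBuffers
// and wraps each handle. Reusing the assumed `device` and `commandPool` from the
// sketches above:
//
//   vk::CommandBufferAllocateInfo allocateInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 2 );
//   vk::raii::CommandBuffers commandBuffers( device, allocateInfo );
//   vk::raii::CommandBuffer frameCommandBuffer = std::move( commandBuffers[0] );
//
// The elements are move-only vk::raii::CommandBuffer wrappers; a moved-from element
// stays in the vector as a null wrapper and frees nothing on destruction.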
  4919. class CuFunctionNVX
  4920. {
  4921. public:
  4922. using CType = VkCuFunctionNVX;
  4923. using CppType = vk::CuFunctionNVX;
  4924. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuFunctionNVX;
  4925. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  4926. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuFunctionNVX;
  4927. public:
  4928. CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  4929. VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
  4930. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  4931. : m_device( *device )
  4932. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  4933. , m_dispatcher( device.getDispatcher() )
  4934. {
  4935. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  4936. device.getDispatcher()->vkCreateCuFunctionNVX( static_cast<VkDevice>( *device ),
  4937. reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
  4938. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  4939. reinterpret_cast<VkCuFunctionNVX *>( &m_function ) ) );
  4940. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  4941. {
  4942. detail::throwResultException( result, "vkCreateCuFunctionNVX" );
  4943. }
  4944. }
  4945. CuFunctionNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  4946. VkCuFunctionNVX function,
  4947. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  4948. : m_device( *device )
  4949. , m_function( function )
  4950. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  4951. , m_dispatcher( device.getDispatcher() )
  4952. {
  4953. }
  4954. CuFunctionNVX( std::nullptr_t ) {}
  4955. ~CuFunctionNVX()
  4956. {
  4957. clear();
  4958. }
  4959. CuFunctionNVX() = delete;
  4960. CuFunctionNVX( CuFunctionNVX const & ) = delete;
  4961. CuFunctionNVX( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
  4962. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  4963. , m_function( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_function, {} ) )
  4964. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  4965. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  4966. {
  4967. }
  4968. CuFunctionNVX & operator=( CuFunctionNVX const & ) = delete;
4969. CuFunctionNVX & operator=( CuFunctionNVX && rhs ) VULKAN_HPP_NOEXCEPT
  4970. {
  4971. if ( this != &rhs )
  4972. {
  4973. std::swap( m_device, rhs.m_device );
  4974. std::swap( m_function, rhs.m_function );
  4975. std::swap( m_allocator, rhs.m_allocator );
  4976. std::swap( m_dispatcher, rhs.m_dispatcher );
  4977. }
  4978. return *this;
  4979. }
  4980. VULKAN_HPP_NAMESPACE::CuFunctionNVX const & operator*() const VULKAN_HPP_NOEXCEPT
  4981. {
  4982. return m_function;
  4983. }
  4984. void clear() VULKAN_HPP_NOEXCEPT
  4985. {
  4986. if ( m_function )
  4987. {
  4988. getDispatcher()->vkDestroyCuFunctionNVX(
  4989. static_cast<VkDevice>( m_device ), static_cast<VkCuFunctionNVX>( m_function ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  4990. }
  4991. m_device = nullptr;
  4992. m_function = nullptr;
  4993. m_allocator = nullptr;
  4994. m_dispatcher = nullptr;
  4995. }
  4996. VULKAN_HPP_NAMESPACE::CuFunctionNVX release()
  4997. {
  4998. m_device = nullptr;
  4999. m_allocator = nullptr;
  5000. m_dispatcher = nullptr;
  5001. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_function, nullptr );
  5002. }
  5003. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5004. {
  5005. return m_device;
  5006. }
  5007. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5008. {
  5009. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5010. return m_dispatcher;
  5011. }
  5012. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX & rhs ) VULKAN_HPP_NOEXCEPT
  5013. {
  5014. std::swap( m_device, rhs.m_device );
  5015. std::swap( m_function, rhs.m_function );
  5016. std::swap( m_allocator, rhs.m_allocator );
  5017. std::swap( m_dispatcher, rhs.m_dispatcher );
  5018. }
  5019. private:
  5020. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5021. VULKAN_HPP_NAMESPACE::CuFunctionNVX m_function = {};
  5022. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5023. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5024. };
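// Usage sketch (illustrative, names assumed): creating a CUDA kernel handle from a previously
// created vk::raii::CuModuleNVX `cuModule` (see the sketch after the CuModuleNVX class below).
//
//   vk::CuFunctionCreateInfoNVX functionCreateInfo( *cuModule, "myKernel" );
//   vk::raii::CuFunctionNVX cuFunction( device, functionCreateInfo );
//   // *cuFunction yields the plain vk::CuFunctionNVX handle, e.g. for vk::CuLaunchInfoNVX;
//   // the handle is destroyed automatically by ~CuFunctionNVX()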
  5025. class CuModuleNVX
  5026. {
  5027. public:
  5028. using CType = VkCuModuleNVX;
  5029. using CppType = vk::CuModuleNVX;
  5030. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCuModuleNVX;
  5031. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5032. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCuModuleNVX;
  5033. public:
  5034. CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5035. VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
  5036. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5037. : m_device( *device )
  5038. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5039. , m_dispatcher( device.getDispatcher() )
  5040. {
  5041. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5042. device.getDispatcher()->vkCreateCuModuleNVX( static_cast<VkDevice>( *device ),
  5043. reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
  5044. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5045. reinterpret_cast<VkCuModuleNVX *>( &m_module ) ) );
  5046. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5047. {
  5048. detail::throwResultException( result, "vkCreateCuModuleNVX" );
  5049. }
  5050. }
  5051. CuModuleNVX( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5052. VkCuModuleNVX module,
  5053. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5054. : m_device( *device )
  5055. , m_module( module )
  5056. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5057. , m_dispatcher( device.getDispatcher() )
  5058. {
  5059. }
  5060. CuModuleNVX( std::nullptr_t ) {}
  5061. ~CuModuleNVX()
  5062. {
  5063. clear();
  5064. }
  5065. CuModuleNVX() = delete;
  5066. CuModuleNVX( CuModuleNVX const & ) = delete;
  5067. CuModuleNVX( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
  5068. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5069. , m_module( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_module, {} ) )
  5070. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5071. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5072. {
  5073. }
  5074. CuModuleNVX & operator=( CuModuleNVX const & ) = delete;
5075. CuModuleNVX & operator=( CuModuleNVX && rhs ) VULKAN_HPP_NOEXCEPT
  5076. {
  5077. if ( this != &rhs )
  5078. {
  5079. std::swap( m_device, rhs.m_device );
  5080. std::swap( m_module, rhs.m_module );
  5081. std::swap( m_allocator, rhs.m_allocator );
  5082. std::swap( m_dispatcher, rhs.m_dispatcher );
  5083. }
  5084. return *this;
  5085. }
  5086. VULKAN_HPP_NAMESPACE::CuModuleNVX const & operator*() const VULKAN_HPP_NOEXCEPT
  5087. {
  5088. return m_module;
  5089. }
  5090. void clear() VULKAN_HPP_NOEXCEPT
  5091. {
  5092. if ( m_module )
  5093. {
  5094. getDispatcher()->vkDestroyCuModuleNVX(
  5095. static_cast<VkDevice>( m_device ), static_cast<VkCuModuleNVX>( m_module ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5096. }
  5097. m_device = nullptr;
  5098. m_module = nullptr;
  5099. m_allocator = nullptr;
  5100. m_dispatcher = nullptr;
  5101. }
  5102. VULKAN_HPP_NAMESPACE::CuModuleNVX release()
  5103. {
  5104. m_device = nullptr;
  5105. m_allocator = nullptr;
  5106. m_dispatcher = nullptr;
  5107. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_module, nullptr );
  5108. }
  5109. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5110. {
  5111. return m_device;
  5112. }
  5113. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5114. {
  5115. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5116. return m_dispatcher;
  5117. }
  5118. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX & rhs ) VULKAN_HPP_NOEXCEPT
  5119. {
  5120. std::swap( m_device, rhs.m_device );
  5121. std::swap( m_module, rhs.m_module );
  5122. std::swap( m_allocator, rhs.m_allocator );
  5123. std::swap( m_dispatcher, rhs.m_dispatcher );
  5124. }
  5125. private:
  5126. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5127. VULKAN_HPP_NAMESPACE::CuModuleNVX m_module = {};
  5128. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5129. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5130. };
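// Usage sketch (illustrative): creating the CUDA module that the CuFunctionNVX sketch above starts
// from. `device` and the PTX/cubin blob `moduleData` (e.g. a std::vector<char>) are assumptions.
//
//   vk::CuModuleCreateInfoNVX moduleCreateInfo( moduleData.size(), moduleData.data() );
//   vk::raii::CuModuleNVX cuModule( device, moduleCreateInfo );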
  5131. class DebugReportCallbackEXT
  5132. {
  5133. public:
  5134. using CType = VkDebugReportCallbackEXT;
  5135. using CppType = vk::DebugReportCallbackEXT;
  5136. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
  5137. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5138. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
  5139. public:
  5140. DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  5141. VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
  5142. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5143. : m_instance( *instance )
  5144. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5145. , m_dispatcher( instance.getDispatcher() )
  5146. {
  5147. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5148. instance.getDispatcher()->vkCreateDebugReportCallbackEXT( static_cast<VkInstance>( *instance ),
  5149. reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
  5150. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5151. reinterpret_cast<VkDebugReportCallbackEXT *>( &m_callback ) ) );
  5152. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5153. {
  5154. detail::throwResultException( result, "vkCreateDebugReportCallbackEXT" );
  5155. }
  5156. }
  5157. DebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  5158. VkDebugReportCallbackEXT callback,
  5159. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5160. : m_instance( *instance )
  5161. , m_callback( callback )
  5162. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5163. , m_dispatcher( instance.getDispatcher() )
  5164. {
  5165. }
  5166. DebugReportCallbackEXT( std::nullptr_t ) {}
  5167. ~DebugReportCallbackEXT()
  5168. {
  5169. clear();
  5170. }
  5171. DebugReportCallbackEXT() = delete;
  5172. DebugReportCallbackEXT( DebugReportCallbackEXT const & ) = delete;
  5173. DebugReportCallbackEXT( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
  5174. : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
  5175. , m_callback( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_callback, {} ) )
  5176. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5177. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5178. {
  5179. }
  5180. DebugReportCallbackEXT & operator=( DebugReportCallbackEXT const & ) = delete;
5181. DebugReportCallbackEXT & operator=( DebugReportCallbackEXT && rhs ) VULKAN_HPP_NOEXCEPT
  5182. {
  5183. if ( this != &rhs )
  5184. {
  5185. std::swap( m_instance, rhs.m_instance );
  5186. std::swap( m_callback, rhs.m_callback );
  5187. std::swap( m_allocator, rhs.m_allocator );
  5188. std::swap( m_dispatcher, rhs.m_dispatcher );
  5189. }
  5190. return *this;
  5191. }
  5192. VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const & operator*() const VULKAN_HPP_NOEXCEPT
  5193. {
  5194. return m_callback;
  5195. }
  5196. void clear() VULKAN_HPP_NOEXCEPT
  5197. {
  5198. if ( m_callback )
  5199. {
  5200. getDispatcher()->vkDestroyDebugReportCallbackEXT( static_cast<VkInstance>( m_instance ),
  5201. static_cast<VkDebugReportCallbackEXT>( m_callback ),
  5202. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5203. }
  5204. m_instance = nullptr;
  5205. m_callback = nullptr;
  5206. m_allocator = nullptr;
  5207. m_dispatcher = nullptr;
  5208. }
  5209. VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT release()
  5210. {
  5211. m_instance = nullptr;
  5212. m_allocator = nullptr;
  5213. m_dispatcher = nullptr;
  5214. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_callback, nullptr );
  5215. }
  5216. VULKAN_HPP_NAMESPACE::Instance getInstance() const
  5217. {
  5218. return m_instance;
  5219. }
  5220. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  5221. {
  5222. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5223. return m_dispatcher;
  5224. }
  5225. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT & rhs ) VULKAN_HPP_NOEXCEPT
  5226. {
  5227. std::swap( m_instance, rhs.m_instance );
  5228. std::swap( m_callback, rhs.m_callback );
  5229. std::swap( m_allocator, rhs.m_allocator );
  5230. std::swap( m_dispatcher, rhs.m_dispatcher );
  5231. }
  5232. private:
  5233. VULKAN_HPP_NAMESPACE::Instance m_instance = {};
  5234. VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT m_callback = {};
  5235. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5236. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
  5237. };
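// Usage sketch (illustrative): installing a legacy VK_EXT_debug_report callback on a
// vk::raii::Instance `instance`. `debugReportCallback` stands for an application function with the
// PFN_vkDebugReportCallbackEXT signature; both names are assumptions of this example.
//
//   vk::DebugReportCallbackCreateInfoEXT reportCreateInfo(
//     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &debugReportCallback );
//   vk::raii::DebugReportCallbackEXT callback( instance, reportCreateInfo );
//   // vkDestroyDebugReportCallbackEXT runs automatically when `callback` is destroyed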
  5238. class DebugUtilsMessengerEXT
  5239. {
  5240. public:
  5241. using CType = VkDebugUtilsMessengerEXT;
  5242. using CppType = vk::DebugUtilsMessengerEXT;
  5243. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
  5244. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5245. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  5246. public:
  5247. DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  5248. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
  5249. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5250. : m_instance( *instance )
  5251. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5252. , m_dispatcher( instance.getDispatcher() )
  5253. {
  5254. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5255. instance.getDispatcher()->vkCreateDebugUtilsMessengerEXT( static_cast<VkInstance>( *instance ),
  5256. reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
  5257. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5258. reinterpret_cast<VkDebugUtilsMessengerEXT *>( &m_messenger ) ) );
  5259. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5260. {
  5261. detail::throwResultException( result, "vkCreateDebugUtilsMessengerEXT" );
  5262. }
  5263. }
  5264. DebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  5265. VkDebugUtilsMessengerEXT messenger,
  5266. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5267. : m_instance( *instance )
  5268. , m_messenger( messenger )
  5269. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5270. , m_dispatcher( instance.getDispatcher() )
  5271. {
  5272. }
  5273. DebugUtilsMessengerEXT( std::nullptr_t ) {}
  5274. ~DebugUtilsMessengerEXT()
  5275. {
  5276. clear();
  5277. }
  5278. DebugUtilsMessengerEXT() = delete;
  5279. DebugUtilsMessengerEXT( DebugUtilsMessengerEXT const & ) = delete;
  5280. DebugUtilsMessengerEXT( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
  5281. : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
  5282. , m_messenger( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_messenger, {} ) )
  5283. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5284. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5285. {
  5286. }
  5287. DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT const & ) = delete;
5288. DebugUtilsMessengerEXT & operator=( DebugUtilsMessengerEXT && rhs ) VULKAN_HPP_NOEXCEPT
  5289. {
  5290. if ( this != &rhs )
  5291. {
  5292. std::swap( m_instance, rhs.m_instance );
  5293. std::swap( m_messenger, rhs.m_messenger );
  5294. std::swap( m_allocator, rhs.m_allocator );
  5295. std::swap( m_dispatcher, rhs.m_dispatcher );
  5296. }
  5297. return *this;
  5298. }
  5299. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const & operator*() const VULKAN_HPP_NOEXCEPT
  5300. {
  5301. return m_messenger;
  5302. }
  5303. void clear() VULKAN_HPP_NOEXCEPT
  5304. {
  5305. if ( m_messenger )
  5306. {
  5307. getDispatcher()->vkDestroyDebugUtilsMessengerEXT( static_cast<VkInstance>( m_instance ),
  5308. static_cast<VkDebugUtilsMessengerEXT>( m_messenger ),
  5309. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5310. }
  5311. m_instance = nullptr;
  5312. m_messenger = nullptr;
  5313. m_allocator = nullptr;
  5314. m_dispatcher = nullptr;
  5315. }
  5316. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT release()
  5317. {
  5318. m_instance = nullptr;
  5319. m_allocator = nullptr;
  5320. m_dispatcher = nullptr;
  5321. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_messenger, nullptr );
  5322. }
  5323. VULKAN_HPP_NAMESPACE::Instance getInstance() const
  5324. {
  5325. return m_instance;
  5326. }
  5327. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  5328. {
  5329. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5330. return m_dispatcher;
  5331. }
  5332. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT & rhs ) VULKAN_HPP_NOEXCEPT
  5333. {
  5334. std::swap( m_instance, rhs.m_instance );
  5335. std::swap( m_messenger, rhs.m_messenger );
  5336. std::swap( m_allocator, rhs.m_allocator );
  5337. std::swap( m_dispatcher, rhs.m_dispatcher );
  5338. }
  5339. private:
  5340. VULKAN_HPP_NAMESPACE::Instance m_instance = {};
  5341. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT m_messenger = {};
  5342. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5343. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
  5344. };
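// Usage sketch (illustrative): the VK_EXT_debug_utils counterpart of the callback above.
// `instance` and `debugUtilsCallback` (PFN_vkDebugUtilsMessengerCallbackEXT) are assumed.
//
//   vk::DebugUtilsMessengerCreateInfoEXT messengerCreateInfo(
//     {},
//     vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
//     vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
//     &debugUtilsCallback );
//   vk::raii::DebugUtilsMessengerEXT messenger( instance, messengerCreateInfo );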
  5345. class DeferredOperationKHR
  5346. {
  5347. public:
  5348. using CType = VkDeferredOperationKHR;
  5349. using CppType = vk::DeferredOperationKHR;
  5350. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
  5351. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5352. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  5353. public:
  5354. DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5355. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5356. : m_device( *device )
  5357. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5358. , m_dispatcher( device.getDispatcher() )
  5359. {
  5360. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5361. device.getDispatcher()->vkCreateDeferredOperationKHR( static_cast<VkDevice>( *device ),
  5362. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5363. reinterpret_cast<VkDeferredOperationKHR *>( &m_operation ) ) );
  5364. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5365. {
  5366. detail::throwResultException( result, "vkCreateDeferredOperationKHR" );
  5367. }
  5368. }
  5369. DeferredOperationKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5370. VkDeferredOperationKHR operation,
  5371. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5372. : m_device( *device )
  5373. , m_operation( operation )
  5374. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5375. , m_dispatcher( device.getDispatcher() )
  5376. {
  5377. }
  5378. DeferredOperationKHR( std::nullptr_t ) {}
  5379. ~DeferredOperationKHR()
  5380. {
  5381. clear();
  5382. }
  5383. DeferredOperationKHR() = delete;
  5384. DeferredOperationKHR( DeferredOperationKHR const & ) = delete;
  5385. DeferredOperationKHR( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
  5386. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5387. , m_operation( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_operation, {} ) )
  5388. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5389. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5390. {
  5391. }
  5392. DeferredOperationKHR & operator=( DeferredOperationKHR const & ) = delete;
5393. DeferredOperationKHR & operator=( DeferredOperationKHR && rhs ) VULKAN_HPP_NOEXCEPT
  5394. {
  5395. if ( this != &rhs )
  5396. {
  5397. std::swap( m_device, rhs.m_device );
  5398. std::swap( m_operation, rhs.m_operation );
  5399. std::swap( m_allocator, rhs.m_allocator );
  5400. std::swap( m_dispatcher, rhs.m_dispatcher );
  5401. }
  5402. return *this;
  5403. }
  5404. VULKAN_HPP_NAMESPACE::DeferredOperationKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  5405. {
  5406. return m_operation;
  5407. }
  5408. void clear() VULKAN_HPP_NOEXCEPT
  5409. {
  5410. if ( m_operation )
  5411. {
  5412. getDispatcher()->vkDestroyDeferredOperationKHR( static_cast<VkDevice>( m_device ),
  5413. static_cast<VkDeferredOperationKHR>( m_operation ),
  5414. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5415. }
  5416. m_device = nullptr;
  5417. m_operation = nullptr;
  5418. m_allocator = nullptr;
  5419. m_dispatcher = nullptr;
  5420. }
  5421. VULKAN_HPP_NAMESPACE::DeferredOperationKHR release()
  5422. {
  5423. m_device = nullptr;
  5424. m_allocator = nullptr;
  5425. m_dispatcher = nullptr;
  5426. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_operation, nullptr );
  5427. }
  5428. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5429. {
  5430. return m_device;
  5431. }
  5432. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5433. {
  5434. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5435. return m_dispatcher;
  5436. }
  5437. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR & rhs ) VULKAN_HPP_NOEXCEPT
  5438. {
  5439. std::swap( m_device, rhs.m_device );
  5440. std::swap( m_operation, rhs.m_operation );
  5441. std::swap( m_allocator, rhs.m_allocator );
  5442. std::swap( m_dispatcher, rhs.m_dispatcher );
  5443. }
  5444. //=== VK_KHR_deferred_host_operations ===
  5445. VULKAN_HPP_NODISCARD uint32_t getMaxConcurrency() const VULKAN_HPP_NOEXCEPT;
  5446. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getResult() const VULKAN_HPP_NOEXCEPT;
  5447. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result join() const;
  5448. private:
  5449. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5450. VULKAN_HPP_NAMESPACE::DeferredOperationKHR m_operation = {};
  5451. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5452. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5453. };
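// Usage sketch (illustrative): a deferred operation is created without a create-info structure,
// handed to a deferrable command (e.g. a ray-tracing pipeline or acceleration-structure build),
// then joined from worker threads. `device` is assumed to be a valid vk::raii::Device.
//
//   vk::raii::DeferredOperationKHR deferredOp( device );
//   uint32_t maxWorkers = deferredOp.getMaxConcurrency();
//   vk::Result joinResult  = deferredOp.join();       // eSuccess, eThreadDoneKHR or eThreadIdleKHR
//   vk::Result finalResult = deferredOp.getResult();  // meaningful once the operation has completed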
  5454. class DescriptorPool
  5455. {
  5456. public:
  5457. using CType = VkDescriptorPool;
  5458. using CppType = vk::DescriptorPool;
  5459. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
  5460. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5461. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
  5462. public:
  5463. DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5464. VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
  5465. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5466. : m_device( *device )
  5467. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5468. , m_dispatcher( device.getDispatcher() )
  5469. {
  5470. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5471. device.getDispatcher()->vkCreateDescriptorPool( static_cast<VkDevice>( *device ),
  5472. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
  5473. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5474. reinterpret_cast<VkDescriptorPool *>( &m_descriptorPool ) ) );
  5475. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5476. {
  5477. detail::throwResultException( result, "vkCreateDescriptorPool" );
  5478. }
  5479. }
  5480. DescriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5481. VkDescriptorPool descriptorPool,
  5482. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5483. : m_device( *device )
  5484. , m_descriptorPool( descriptorPool )
  5485. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5486. , m_dispatcher( device.getDispatcher() )
  5487. {
  5488. }
  5489. DescriptorPool( std::nullptr_t ) {}
  5490. ~DescriptorPool()
  5491. {
  5492. clear();
  5493. }
  5494. DescriptorPool() = delete;
  5495. DescriptorPool( DescriptorPool const & ) = delete;
  5496. DescriptorPool( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
  5497. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5498. , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
  5499. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5500. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5501. {
  5502. }
  5503. DescriptorPool & operator=( DescriptorPool const & ) = delete;
5504. DescriptorPool & operator=( DescriptorPool && rhs ) VULKAN_HPP_NOEXCEPT
  5505. {
  5506. if ( this != &rhs )
  5507. {
  5508. std::swap( m_device, rhs.m_device );
  5509. std::swap( m_descriptorPool, rhs.m_descriptorPool );
  5510. std::swap( m_allocator, rhs.m_allocator );
  5511. std::swap( m_dispatcher, rhs.m_dispatcher );
  5512. }
  5513. return *this;
  5514. }
  5515. VULKAN_HPP_NAMESPACE::DescriptorPool const & operator*() const VULKAN_HPP_NOEXCEPT
  5516. {
  5517. return m_descriptorPool;
  5518. }
  5519. void clear() VULKAN_HPP_NOEXCEPT
  5520. {
  5521. if ( m_descriptorPool )
  5522. {
  5523. getDispatcher()->vkDestroyDescriptorPool( static_cast<VkDevice>( m_device ),
  5524. static_cast<VkDescriptorPool>( m_descriptorPool ),
  5525. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5526. }
  5527. m_device = nullptr;
  5528. m_descriptorPool = nullptr;
  5529. m_allocator = nullptr;
  5530. m_dispatcher = nullptr;
  5531. }
  5532. VULKAN_HPP_NAMESPACE::DescriptorPool release()
  5533. {
  5534. m_device = nullptr;
  5535. m_allocator = nullptr;
  5536. m_dispatcher = nullptr;
  5537. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorPool, nullptr );
  5538. }
  5539. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5540. {
  5541. return m_device;
  5542. }
  5543. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5544. {
  5545. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5546. return m_dispatcher;
  5547. }
  5548. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorPool & rhs ) VULKAN_HPP_NOEXCEPT
  5549. {
  5550. std::swap( m_device, rhs.m_device );
  5551. std::swap( m_descriptorPool, rhs.m_descriptorPool );
  5552. std::swap( m_allocator, rhs.m_allocator );
  5553. std::swap( m_dispatcher, rhs.m_dispatcher );
  5554. }
  5555. //=== VK_VERSION_1_0 ===
  5556. void reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  5557. private:
  5558. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5559. VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
  5560. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5561. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5562. };
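// Usage sketch (illustrative, sizes arbitrary): creating a pool that the DescriptorSets container
// below can allocate from. eFreeDescriptorSet is set because the RAII DescriptorSet wrapper frees
// each set individually (see the note after that class).
//
//   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
//   vk::DescriptorPoolCreateInfo poolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, poolSize );
//   vk::raii::DescriptorPool descriptorPool( device, poolCreateInfo );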
  5563. class DescriptorSet
  5564. {
  5565. public:
  5566. using CType = VkDescriptorSet;
  5567. using CppType = vk::DescriptorSet;
  5568. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
  5569. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5570. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
  5571. public:
  5572. DescriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkDescriptorSet descriptorSet, VkDescriptorPool descriptorPool )
  5573. : m_device( *device ), m_descriptorPool( descriptorPool ), m_descriptorSet( descriptorSet ), m_dispatcher( device.getDispatcher() )
  5574. {
  5575. }
  5576. DescriptorSet( std::nullptr_t ) {}
  5577. ~DescriptorSet()
  5578. {
  5579. clear();
  5580. }
  5581. DescriptorSet() = delete;
  5582. DescriptorSet( DescriptorSet const & ) = delete;
  5583. DescriptorSet( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
  5584. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5585. , m_descriptorPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorPool, {} ) )
  5586. , m_descriptorSet( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSet, {} ) )
  5587. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5588. {
  5589. }
  5590. DescriptorSet & operator=( DescriptorSet const & ) = delete;
5591. DescriptorSet & operator=( DescriptorSet && rhs ) VULKAN_HPP_NOEXCEPT
  5592. {
  5593. if ( this != &rhs )
  5594. {
  5595. std::swap( m_device, rhs.m_device );
  5596. std::swap( m_descriptorPool, rhs.m_descriptorPool );
  5597. std::swap( m_descriptorSet, rhs.m_descriptorSet );
  5598. std::swap( m_dispatcher, rhs.m_dispatcher );
  5599. }
  5600. return *this;
  5601. }
  5602. VULKAN_HPP_NAMESPACE::DescriptorSet const & operator*() const VULKAN_HPP_NOEXCEPT
  5603. {
  5604. return m_descriptorSet;
  5605. }
  5606. void clear() VULKAN_HPP_NOEXCEPT
  5607. {
  5608. if ( m_descriptorSet )
  5609. {
  5610. getDispatcher()->vkFreeDescriptorSets( static_cast<VkDevice>( m_device ),
  5611. static_cast<VkDescriptorPool>( m_descriptorPool ),
  5612. 1,
  5613. reinterpret_cast<VkDescriptorSet const *>( &m_descriptorSet ) );
  5614. }
  5615. m_device = nullptr;
  5616. m_descriptorPool = nullptr;
  5617. m_descriptorSet = nullptr;
  5618. m_dispatcher = nullptr;
  5619. }
  5620. VULKAN_HPP_NAMESPACE::DescriptorSet release()
  5621. {
  5622. m_device = nullptr;
  5623. m_descriptorPool = nullptr;
  5624. m_dispatcher = nullptr;
  5625. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSet, nullptr );
  5626. }
  5627. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5628. {
  5629. return m_device;
  5630. }
  5631. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5632. {
  5633. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5634. return m_dispatcher;
  5635. }
  5636. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet & rhs ) VULKAN_HPP_NOEXCEPT
  5637. {
  5638. std::swap( m_device, rhs.m_device );
  5639. std::swap( m_descriptorPool, rhs.m_descriptorPool );
  5640. std::swap( m_descriptorSet, rhs.m_descriptorSet );
  5641. std::swap( m_dispatcher, rhs.m_dispatcher );
  5642. }
  5643. //=== VK_VERSION_1_1 ===
  5644. template <typename DataType>
  5645. void updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
  5646. //=== VK_KHR_descriptor_update_template ===
  5647. template <typename DataType>
  5648. void updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data ) const VULKAN_HPP_NOEXCEPT;
  5649. //=== VK_VALVE_descriptor_set_host_mapping ===
  5650. VULKAN_HPP_NODISCARD void * getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT;
  5651. private:
  5652. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5653. VULKAN_HPP_NAMESPACE::DescriptorPool m_descriptorPool = {};
  5654. VULKAN_HPP_NAMESPACE::DescriptorSet m_descriptorSet = {};
  5655. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5656. };
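// Note (illustrative, not part of the generated API): clear() returns the set to its pool with
// vkFreeDescriptorSets( ..., 1, &set ), which is only valid if the pool was created with
// vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet. Sets are normally obtained through the
// DescriptorSets container below; a set can then be written via a template, e.g. (names assumed):
//
//   descriptorSets.front().updateWithTemplate( *updateTemplate, myUpdateData );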
  5657. class DescriptorSets : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
  5658. {
  5659. public:
  5660. DescriptorSets( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5661. VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo )
  5662. {
  5663. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  5664. std::vector<VkDescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  5665. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkAllocateDescriptorSets(
  5666. static_cast<VkDevice>( *device ), reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), descriptorSets.data() ) );
  5667. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5668. {
  5669. this->reserve( allocateInfo.descriptorSetCount );
  5670. for ( auto const & descriptorSet : descriptorSets )
  5671. {
  5672. this->emplace_back( device, descriptorSet, static_cast<VkDescriptorPool>( allocateInfo.descriptorPool ) );
  5673. }
  5674. }
  5675. else
  5676. {
  5677. detail::throwResultException( result, "vkAllocateDescriptorSets" );
  5678. }
  5679. }
  5680. DescriptorSets( std::nullptr_t ) {}
  5681. DescriptorSets() = delete;
  5682. DescriptorSets( DescriptorSets const & ) = delete;
  5683. DescriptorSets( DescriptorSets && rhs ) = default;
  5684. DescriptorSets & operator=( DescriptorSets const & ) = delete;
  5685. DescriptorSets & operator=( DescriptorSets && rhs ) = default;
  5686. };
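// Usage sketch (illustrative): allocating two sets from the pool sketched above, using a
// vk::raii::DescriptorSetLayout `descriptorSetLayout` assumed to exist in the calling code.
//
//   std::array<vk::DescriptorSetLayout, 2> setLayouts = { *descriptorSetLayout, *descriptorSetLayout };
//   vk::DescriptorSetAllocateInfo setAllocateInfo( *descriptorPool, setLayouts );
//   vk::raii::DescriptorSets descriptorSets( device, setAllocateInfo );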
  5687. class DescriptorSetLayout
  5688. {
  5689. public:
  5690. using CType = VkDescriptorSetLayout;
  5691. using CppType = vk::DescriptorSetLayout;
  5692. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
  5693. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5694. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
  5695. public:
  5696. DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5697. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
  5698. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5699. : m_device( *device )
  5700. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5701. , m_dispatcher( device.getDispatcher() )
  5702. {
  5703. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5704. device.getDispatcher()->vkCreateDescriptorSetLayout( static_cast<VkDevice>( *device ),
  5705. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
  5706. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5707. reinterpret_cast<VkDescriptorSetLayout *>( &m_descriptorSetLayout ) ) );
  5708. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5709. {
  5710. detail::throwResultException( result, "vkCreateDescriptorSetLayout" );
  5711. }
  5712. }
  5713. DescriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5714. VkDescriptorSetLayout descriptorSetLayout,
  5715. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5716. : m_device( *device )
  5717. , m_descriptorSetLayout( descriptorSetLayout )
  5718. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5719. , m_dispatcher( device.getDispatcher() )
  5720. {
  5721. }
  5722. DescriptorSetLayout( std::nullptr_t ) {}
  5723. ~DescriptorSetLayout()
  5724. {
  5725. clear();
  5726. }
  5727. DescriptorSetLayout() = delete;
  5728. DescriptorSetLayout( DescriptorSetLayout const & ) = delete;
  5729. DescriptorSetLayout( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
  5730. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5731. , m_descriptorSetLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorSetLayout, {} ) )
  5732. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5733. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5734. {
  5735. }
  5736. DescriptorSetLayout & operator=( DescriptorSetLayout const & ) = delete;
5737. DescriptorSetLayout & operator=( DescriptorSetLayout && rhs ) VULKAN_HPP_NOEXCEPT
  5738. {
  5739. if ( this != &rhs )
  5740. {
  5741. std::swap( m_device, rhs.m_device );
  5742. std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
  5743. std::swap( m_allocator, rhs.m_allocator );
  5744. std::swap( m_dispatcher, rhs.m_dispatcher );
  5745. }
  5746. return *this;
  5747. }
  5748. VULKAN_HPP_NAMESPACE::DescriptorSetLayout const & operator*() const VULKAN_HPP_NOEXCEPT
  5749. {
  5750. return m_descriptorSetLayout;
  5751. }
  5752. void clear() VULKAN_HPP_NOEXCEPT
  5753. {
  5754. if ( m_descriptorSetLayout )
  5755. {
  5756. getDispatcher()->vkDestroyDescriptorSetLayout( static_cast<VkDevice>( m_device ),
  5757. static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
  5758. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5759. }
  5760. m_device = nullptr;
  5761. m_descriptorSetLayout = nullptr;
  5762. m_allocator = nullptr;
  5763. m_dispatcher = nullptr;
  5764. }
  5765. VULKAN_HPP_NAMESPACE::DescriptorSetLayout release()
  5766. {
  5767. m_device = nullptr;
  5768. m_allocator = nullptr;
  5769. m_dispatcher = nullptr;
  5770. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorSetLayout, nullptr );
  5771. }
  5772. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5773. {
  5774. return m_device;
  5775. }
  5776. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5777. {
  5778. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5779. return m_dispatcher;
  5780. }
  5781. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout & rhs ) VULKAN_HPP_NOEXCEPT
  5782. {
  5783. std::swap( m_device, rhs.m_device );
  5784. std::swap( m_descriptorSetLayout, rhs.m_descriptorSetLayout );
  5785. std::swap( m_allocator, rhs.m_allocator );
  5786. std::swap( m_dispatcher, rhs.m_dispatcher );
  5787. }
  5788. //=== VK_EXT_descriptor_buffer ===
  5789. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getSizeEXT() const VULKAN_HPP_NOEXCEPT;
  5790. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT;
  5791. private:
  5792. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5793. VULKAN_HPP_NAMESPACE::DescriptorSetLayout m_descriptorSetLayout = {};
  5794. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5795. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5796. };
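// Usage sketch (illustrative): a single-binding layout matching the pool above.
//
//   vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex );
//   vk::DescriptorSetLayoutCreateInfo layoutCreateInfo( {}, binding );
//   vk::raii::DescriptorSetLayout descriptorSetLayout( device, layoutCreateInfo );
//   // with VK_EXT_descriptor_buffer enabled, getSizeEXT() and getBindingOffsetEXT( 0 ) report
//   // where this binding lives inside a descriptor buffer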
  5797. class DescriptorUpdateTemplate
  5798. {
  5799. public:
  5800. using CType = VkDescriptorUpdateTemplate;
  5801. using CppType = vk::DescriptorUpdateTemplate;
  5802. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
  5803. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5804. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
  5805. public:
  5806. DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5807. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
  5808. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5809. : m_device( *device )
  5810. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5811. , m_dispatcher( device.getDispatcher() )
  5812. {
  5813. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  5814. device.getDispatcher()->vkCreateDescriptorUpdateTemplate( static_cast<VkDevice>( *device ),
  5815. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
  5816. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5817. reinterpret_cast<VkDescriptorUpdateTemplate *>( &m_descriptorUpdateTemplate ) ) );
  5818. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5819. {
  5820. detail::throwResultException( result, "vkCreateDescriptorUpdateTemplate" );
  5821. }
  5822. }
  5823. DescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5824. VkDescriptorUpdateTemplate descriptorUpdateTemplate,
  5825. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5826. : m_device( *device )
  5827. , m_descriptorUpdateTemplate( descriptorUpdateTemplate )
  5828. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5829. , m_dispatcher( device.getDispatcher() )
  5830. {
  5831. }
  5832. DescriptorUpdateTemplate( std::nullptr_t ) {}
  5833. ~DescriptorUpdateTemplate()
  5834. {
  5835. clear();
  5836. }
  5837. DescriptorUpdateTemplate() = delete;
  5838. DescriptorUpdateTemplate( DescriptorUpdateTemplate const & ) = delete;
  5839. DescriptorUpdateTemplate( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
  5840. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5841. , m_descriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_descriptorUpdateTemplate, {} ) )
  5842. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5843. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5844. {
  5845. }
  5846. DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate const & ) = delete;
5847. DescriptorUpdateTemplate & operator=( DescriptorUpdateTemplate && rhs ) VULKAN_HPP_NOEXCEPT
  5848. {
  5849. if ( this != &rhs )
  5850. {
  5851. std::swap( m_device, rhs.m_device );
  5852. std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
  5853. std::swap( m_allocator, rhs.m_allocator );
  5854. std::swap( m_dispatcher, rhs.m_dispatcher );
  5855. }
  5856. return *this;
  5857. }
  5858. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const & operator*() const VULKAN_HPP_NOEXCEPT
  5859. {
  5860. return m_descriptorUpdateTemplate;
  5861. }
  5862. void clear() VULKAN_HPP_NOEXCEPT
  5863. {
  5864. if ( m_descriptorUpdateTemplate )
  5865. {
  5866. getDispatcher()->vkDestroyDescriptorUpdateTemplate( static_cast<VkDevice>( m_device ),
  5867. static_cast<VkDescriptorUpdateTemplate>( m_descriptorUpdateTemplate ),
  5868. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5869. }
  5870. m_device = nullptr;
  5871. m_descriptorUpdateTemplate = nullptr;
  5872. m_allocator = nullptr;
  5873. m_dispatcher = nullptr;
  5874. }
  5875. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate release()
  5876. {
  5877. m_device = nullptr;
  5878. m_allocator = nullptr;
  5879. m_dispatcher = nullptr;
  5880. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_descriptorUpdateTemplate, nullptr );
  5881. }
  5882. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5883. {
  5884. return m_device;
  5885. }
  5886. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5887. {
  5888. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5889. return m_dispatcher;
  5890. }
  5891. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate & rhs ) VULKAN_HPP_NOEXCEPT
  5892. {
  5893. std::swap( m_device, rhs.m_device );
  5894. std::swap( m_descriptorUpdateTemplate, rhs.m_descriptorUpdateTemplate );
  5895. std::swap( m_allocator, rhs.m_allocator );
  5896. std::swap( m_dispatcher, rhs.m_dispatcher );
  5897. }
  5898. private:
  5899. VULKAN_HPP_NAMESPACE::Device m_device = {};
  5900. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate m_descriptorUpdateTemplate = {};
  5901. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  5902. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  5903. };
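// Usage sketch (illustrative, field order follows the corresponding C structs; treat as a sketch
// only): building a template so that sets can be written with updateWithTemplate() instead of
// explicit vkUpdateDescriptorSets calls. `MyUpdateData` is a hypothetical application struct
// holding a vk::DescriptorBufferInfo member `bufferInfo`.
//
//   vk::DescriptorUpdateTemplateEntry entry( 0, 0, 1, vk::DescriptorType::eUniformBuffer,
//                                            offsetof( MyUpdateData, bufferInfo ), sizeof( MyUpdateData ) );
//   vk::DescriptorUpdateTemplateCreateInfo templateCreateInfo(
//     {}, entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, *descriptorSetLayout );
//   vk::raii::DescriptorUpdateTemplate updateTemplate( device, templateCreateInfo );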
  5904. class DeviceMemory
  5905. {
  5906. public:
  5907. using CType = VkDeviceMemory;
  5908. using CppType = vk::DeviceMemory;
  5909. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
  5910. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  5911. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
  5912. public:
  5913. DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5914. VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
  5915. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5916. : m_device( *device )
  5917. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5918. , m_dispatcher( device.getDispatcher() )
  5919. {
  5920. VULKAN_HPP_NAMESPACE::Result result =
  5921. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkAllocateMemory( static_cast<VkDevice>( *device ),
  5922. reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
  5923. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  5924. reinterpret_cast<VkDeviceMemory *>( &m_memory ) ) );
  5925. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  5926. {
  5927. detail::throwResultException( result, "vkAllocateMemory" );
  5928. }
  5929. }
  5930. DeviceMemory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  5931. VkDeviceMemory memory,
  5932. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  5933. : m_device( *device )
  5934. , m_memory( memory )
  5935. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  5936. , m_dispatcher( device.getDispatcher() )
  5937. {
  5938. }
  5939. DeviceMemory( std::nullptr_t ) {}
  5940. ~DeviceMemory()
  5941. {
  5942. clear();
  5943. }
  5944. DeviceMemory() = delete;
  5945. DeviceMemory( DeviceMemory const & ) = delete;
  5946. DeviceMemory( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
  5947. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  5948. , m_memory( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_memory, {} ) )
  5949. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  5950. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  5951. {
  5952. }
  5953. DeviceMemory & operator=( DeviceMemory const & ) = delete;
5954. DeviceMemory & operator=( DeviceMemory && rhs ) VULKAN_HPP_NOEXCEPT
  5955. {
  5956. if ( this != &rhs )
  5957. {
  5958. std::swap( m_device, rhs.m_device );
  5959. std::swap( m_memory, rhs.m_memory );
  5960. std::swap( m_allocator, rhs.m_allocator );
  5961. std::swap( m_dispatcher, rhs.m_dispatcher );
  5962. }
  5963. return *this;
  5964. }
  5965. VULKAN_HPP_NAMESPACE::DeviceMemory const & operator*() const VULKAN_HPP_NOEXCEPT
  5966. {
  5967. return m_memory;
  5968. }
  5969. void clear() VULKAN_HPP_NOEXCEPT
  5970. {
  5971. if ( m_memory )
  5972. {
  5973. getDispatcher()->vkFreeMemory(
  5974. static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  5975. }
  5976. m_device = nullptr;
  5977. m_memory = nullptr;
  5978. m_allocator = nullptr;
  5979. m_dispatcher = nullptr;
  5980. }
  5981. VULKAN_HPP_NAMESPACE::DeviceMemory release()
  5982. {
  5983. m_device = nullptr;
  5984. m_allocator = nullptr;
  5985. m_dispatcher = nullptr;
  5986. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_memory, nullptr );
  5987. }
  5988. VULKAN_HPP_NAMESPACE::Device getDevice() const
  5989. {
  5990. return m_device;
  5991. }
  5992. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  5993. {
  5994. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  5995. return m_dispatcher;
  5996. }
  5997. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceMemory & rhs ) VULKAN_HPP_NOEXCEPT
  5998. {
  5999. std::swap( m_device, rhs.m_device );
  6000. std::swap( m_memory, rhs.m_memory );
  6001. std::swap( m_allocator, rhs.m_allocator );
  6002. std::swap( m_dispatcher, rhs.m_dispatcher );
  6003. }
  6004. //=== VK_VERSION_1_0 ===
  6005. VULKAN_HPP_NODISCARD void * mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize offset,
  6006. VULKAN_HPP_NAMESPACE::DeviceSize size,
  6007. VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  6008. void unmapMemory() const VULKAN_HPP_NOEXCEPT;
  6009. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getCommitment() const VULKAN_HPP_NOEXCEPT;
  6010. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  6011. //=== VK_NV_external_memory_win32 ===
  6012. VULKAN_HPP_NODISCARD HANDLE getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const;
  6013. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  6014. //=== VK_EXT_pageable_device_local_memory ===
  6015. void setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT;
  6016. private:
  6017. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6018. VULKAN_HPP_NAMESPACE::DeviceMemory m_memory = {};
  6019. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6020. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6021. };
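// Usage sketch (illustrative): allocating and mapping host-visible memory. The memory type index
// (0 here) must really be chosen from vk::PhysicalDeviceMemoryProperties; the size is arbitrary.
//
//   vk::MemoryAllocateInfo memoryAllocateInfo( 65536, /*memoryTypeIndex=*/0 );
//   vk::raii::DeviceMemory memory( device, memoryAllocateInfo );
//   void * mapped = memory.mapMemory( 0, VK_WHOLE_SIZE );
//   // ... fill `mapped` ...
//   memory.unmapMemory();
//   // vkFreeMemory runs automatically when `memory` is destroyed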
  6022. class DisplayKHR
  6023. {
  6024. public:
  6025. using CType = VkDisplayKHR;
  6026. using CppType = vk::DisplayKHR;
  6027. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
  6028. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6029. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
  6030. public:
  6031. DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, int32_t drmFd, uint32_t connectorId )
  6032. : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
  6033. {
  6034. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetDrmDisplayEXT(
  6035. static_cast<VkPhysicalDevice>( *physicalDevice ), drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
  6036. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6037. {
  6038. detail::throwResultException( result, "vkGetDrmDisplayEXT" );
  6039. }
  6040. }
  6041. # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  6042. DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, Display & dpy, RROutput rrOutput )
  6043. : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
  6044. {
  6045. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetRandROutputDisplayEXT(
  6046. static_cast<VkPhysicalDevice>( *physicalDevice ), &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
  6047. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6048. {
  6049. detail::throwResultException( result, "vkGetRandROutputDisplayEXT" );
  6050. }
  6051. }
  6052. # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  6053. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  6054. DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t deviceRelativeId )
  6055. : m_physicalDevice( *physicalDevice ), m_dispatcher( physicalDevice.getDispatcher() )
  6056. {
  6057. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( physicalDevice.getDispatcher()->vkGetWinrtDisplayNV(
  6058. static_cast<VkPhysicalDevice>( *physicalDevice ), deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &m_display ) ) );
  6059. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6060. {
  6061. detail::throwResultException( result, "vkGetWinrtDisplayNV" );
  6062. }
  6063. }
  6064. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  6065. DisplayKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, VkDisplayKHR display )
  6066. : m_physicalDevice( *physicalDevice ), m_display( display ), m_dispatcher( physicalDevice.getDispatcher() )
  6067. {
  6068. }
  6069. DisplayKHR( std::nullptr_t ) {}
  6070. ~DisplayKHR()
  6071. {
  6072. clear();
  6073. }
  6074. DisplayKHR() = delete;
  6075. DisplayKHR( DisplayKHR const & ) = delete;
  6076. DisplayKHR( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
  6077. : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
  6078. , m_display( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_display, {} ) )
  6079. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6080. {
  6081. }
  6082. DisplayKHR & operator=( DisplayKHR const & ) = delete;
6083. DisplayKHR & operator=( DisplayKHR && rhs ) VULKAN_HPP_NOEXCEPT
  6084. {
  6085. if ( this != &rhs )
  6086. {
  6087. std::swap( m_physicalDevice, rhs.m_physicalDevice );
  6088. std::swap( m_display, rhs.m_display );
  6089. std::swap( m_dispatcher, rhs.m_dispatcher );
  6090. }
  6091. return *this;
  6092. }
  6093. VULKAN_HPP_NAMESPACE::DisplayKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  6094. {
  6095. return m_display;
  6096. }
  6097. void clear() VULKAN_HPP_NOEXCEPT
  6098. {
  6099. if ( m_display )
  6100. {
  6101. getDispatcher()->vkReleaseDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
  6102. }
  6103. m_physicalDevice = nullptr;
  6104. m_display = nullptr;
  6105. m_dispatcher = nullptr;
  6106. }
  6107. VULKAN_HPP_NAMESPACE::DisplayKHR release()
  6108. {
  6109. m_physicalDevice = nullptr;
  6110. m_dispatcher = nullptr;
  6111. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_display, nullptr );
  6112. }
  6113. VULKAN_HPP_NAMESPACE::PhysicalDevice getPhysicalDevice() const
  6114. {
  6115. return m_physicalDevice;
  6116. }
  6117. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  6118. {
  6119. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6120. return m_dispatcher;
  6121. }
  6122. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR & rhs ) VULKAN_HPP_NOEXCEPT
  6123. {
  6124. std::swap( m_physicalDevice, rhs.m_physicalDevice );
  6125. std::swap( m_display, rhs.m_display );
  6126. std::swap( m_dispatcher, rhs.m_dispatcher );
  6127. }
  6128. //=== VK_KHR_display ===
  6129. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> getModeProperties() const;
  6130. VULKAN_HPP_NODISCARD VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
  6131. createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
  6132. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr ) const;
  6133. //=== VK_KHR_get_display_properties2 ===
  6134. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> getModeProperties2() const;
  6135. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  6136. //=== VK_NV_acquire_winrt_display ===
  6137. void acquireWinrtNV() const;
  6138. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  6139. private:
  6140. VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
  6141. VULKAN_HPP_NAMESPACE::DisplayKHR m_display = {};
  6142. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
  6143. };
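//=== Usage sketch: DisplayKHR (illustrative comments only, not part of the generated API) ===
// A minimal, hedged example of acquiring a display through the DRM constructor above and
// enumerating its modes. `vk::` and `vk::raii::` are the default spellings of
// VULKAN_HPP_NAMESPACE and VULKAN_HPP_RAII_NAMESPACE; `physicalDevice` is assumed to be an
// existing vk::raii::PhysicalDevice, and `drmFd` / `connectorId` are a DRM file descriptor
// and connector id obtained elsewhere.
//
//   vk::raii::DisplayKHR display( physicalDevice, drmFd, connectorId );             // vkGetDrmDisplayEXT
//   std::vector<vk::DisplayModePropertiesKHR> modes = display.getModeProperties();  // vkGetDisplayModePropertiesKHR
//
// When `display` goes out of scope, clear() releases the handle via vkReleaseDisplayEXT.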
class DisplayKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
{
public:
  DisplayKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice const & physicalDevice, uint32_t planeIndex )
  {
    VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * dispatcher = physicalDevice.getDispatcher();
    std::vector<VkDisplayKHR> displays;
    uint32_t displayCount;
    VULKAN_HPP_NAMESPACE::Result result;
    // standard two-call enumeration: query the count, size the buffer, and retry while the
    // implementation keeps returning VK_INCOMPLETE
    do
    {
      result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
        dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR( static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, nullptr ) );
      if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) && displayCount )
      {
        displays.resize( displayCount );
        result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkGetDisplayPlaneSupportedDisplaysKHR(
          static_cast<VkPhysicalDevice>( *physicalDevice ), planeIndex, &displayCount, displays.data() ) );
      }
    } while ( result == VULKAN_HPP_NAMESPACE::Result::eIncomplete );
    if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
    {
      VULKAN_HPP_ASSERT( displayCount <= displays.size() );
      this->reserve( displayCount );
      // wrap each raw VkDisplayKHR handle in a RAII DisplayKHR tied to the physical device
      for ( auto const & displayKHR : displays )
      {
        this->emplace_back( physicalDevice, displayKHR );
      }
    }
    else
    {
      detail::throwResultException( result, "vkGetDisplayPlaneSupportedDisplaysKHR" );
    }
  }
  DisplayKHRs( std::nullptr_t ) {}
  DisplayKHRs() = delete;
  DisplayKHRs( DisplayKHRs const & ) = delete;
  DisplayKHRs( DisplayKHRs && rhs ) = default;
  DisplayKHRs & operator=( DisplayKHRs const & ) = delete;
  DisplayKHRs & operator=( DisplayKHRs && rhs ) = default;
};
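//=== Usage sketch: DisplayKHRs (illustrative comments only) ===
// The constructor above fills this vector with every display the given plane can be used with.
// A hedged example, assuming `physicalDevice` (vk::raii::PhysicalDevice) and a valid `planeIndex`:
//
//   vk::raii::DisplayKHRs displays( physicalDevice, planeIndex );  // vkGetDisplayPlaneSupportedDisplaysKHR
//   if ( !displays.empty() )
//   {
//     vk::raii::DisplayKHR & display = displays.front();  // usable like any other DisplayKHR wrapper
//   }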
  6185. class DisplayModeKHR
  6186. {
  6187. public:
  6188. using CType = VkDisplayModeKHR;
  6189. using CppType = vk::DisplayModeKHR;
  6190. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
  6191. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6192. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
  6193. public:
  6194. DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
  6195. VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
  6196. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6197. : m_physicalDevice( display.getPhysicalDevice() ), m_dispatcher( display.getDispatcher() )
  6198. {
  6199. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( display.getDispatcher()->vkCreateDisplayModeKHR(
  6200. static_cast<VkPhysicalDevice>( display.getPhysicalDevice() ),
  6201. static_cast<VkDisplayKHR>( *display ),
  6202. reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
  6203. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  6204. reinterpret_cast<VkDisplayModeKHR *>( &m_displayModeKHR ) ) );
  6205. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6206. {
  6207. detail::throwResultException( result, "vkCreateDisplayModeKHR" );
  6208. }
  6209. }
  6210. DisplayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display, VkDisplayModeKHR displayModeKHR )
  6211. : m_physicalDevice( display.getPhysicalDevice() ), m_displayModeKHR( displayModeKHR ), m_dispatcher( display.getDispatcher() )
  6212. {
  6213. }
  6214. DisplayModeKHR( std::nullptr_t ) {}
  6215. ~DisplayModeKHR()
  6216. {
  6217. clear();
  6218. }
  6219. DisplayModeKHR() = delete;
  6220. DisplayModeKHR( DisplayModeKHR const & rhs ) : m_displayModeKHR( rhs.m_displayModeKHR ), m_dispatcher( rhs.m_dispatcher ) {}
  6221. DisplayModeKHR( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
  6222. : m_physicalDevice( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_physicalDevice, {} ) )
  6223. , m_displayModeKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_displayModeKHR, {} ) )
  6224. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6225. {
  6226. }
  6227. DisplayModeKHR & operator=( DisplayModeKHR const & rhs )
  6228. {
  6229. m_displayModeKHR = rhs.m_displayModeKHR;
  6230. m_dispatcher = rhs.m_dispatcher;
  6231. return *this;
  6232. }
  6233. DisplayModeKHR & operator=( DisplayModeKHR && rhs ) VULKAN_HPP_NOEXCEPT
  6234. {
  6235. if ( this != &rhs )
  6236. {
  6237. std::swap( m_physicalDevice, rhs.m_physicalDevice );
  6238. std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
  6239. std::swap( m_dispatcher, rhs.m_dispatcher );
  6240. }
  6241. return *this;
  6242. }
  6243. VULKAN_HPP_NAMESPACE::DisplayModeKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  6244. {
  6245. return m_displayModeKHR;
  6246. }
  6247. void clear() VULKAN_HPP_NOEXCEPT
  6248. {
  6249. m_physicalDevice = nullptr;
  6250. m_displayModeKHR = nullptr;
  6251. m_dispatcher = nullptr;
  6252. }
  6253. VULKAN_HPP_NAMESPACE::DisplayModeKHR release()
  6254. {
  6255. m_physicalDevice = nullptr;
  6256. m_dispatcher = nullptr;
  6257. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_displayModeKHR, nullptr );
  6258. }
  6259. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  6260. {
  6261. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6262. return m_dispatcher;
  6263. }
  6264. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR & rhs ) VULKAN_HPP_NOEXCEPT
  6265. {
  6266. std::swap( m_physicalDevice, rhs.m_physicalDevice );
  6267. std::swap( m_displayModeKHR, rhs.m_displayModeKHR );
  6268. std::swap( m_dispatcher, rhs.m_dispatcher );
  6269. }
  6270. //=== VK_KHR_display ===
  6271. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR getDisplayPlaneCapabilities( uint32_t planeIndex ) const;
  6272. private:
  6273. VULKAN_HPP_NAMESPACE::PhysicalDevice m_physicalDevice = {};
  6274. VULKAN_HPP_NAMESPACE::DisplayModeKHR m_displayModeKHR = {};
  6275. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
  6276. };
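//=== Usage sketch: DisplayModeKHR (illustrative comments only) ===
// A hedged example of creating a display mode and querying plane capabilities; `display` is
// assumed to be a vk::raii::DisplayKHR and `planeIndex` a plane it supports. The resolution
// and refresh rate are placeholders.
//
//   vk::DisplayModeCreateInfoKHR modeCreateInfo{};
//   modeCreateInfo.parameters.visibleRegion = vk::Extent2D{ 1920, 1080 };
//   modeCreateInfo.parameters.refreshRate   = 60000;  // millihertz, i.e. 60 Hz
//   vk::raii::DisplayModeKHR mode = display.createMode( modeCreateInfo );        // vkCreateDisplayModeKHR
//   vk::DisplayPlaneCapabilitiesKHR caps = mode.getDisplayPlaneCapabilities( planeIndex );
//
// Unlike the other wrappers here, DisplayModeKHR is copyable: display modes have no destroy
// entry point, and the copy operations above carry over only the mode handle and dispatcher.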
  6277. class Event
  6278. {
  6279. public:
  6280. using CType = VkEvent;
  6281. using CppType = vk::Event;
  6282. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
  6283. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6284. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
  6285. public:
  6286. Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6287. VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
  6288. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6289. : m_device( *device )
  6290. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6291. , m_dispatcher( device.getDispatcher() )
  6292. {
  6293. VULKAN_HPP_NAMESPACE::Result result =
  6294. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateEvent( static_cast<VkDevice>( *device ),
  6295. reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
  6296. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6297. reinterpret_cast<VkEvent *>( &m_event ) ) );
  6298. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6299. {
  6300. detail::throwResultException( result, "vkCreateEvent" );
  6301. }
  6302. }
  6303. Event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6304. VkEvent event,
  6305. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6306. : m_device( *device )
  6307. , m_event( event )
  6308. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6309. , m_dispatcher( device.getDispatcher() )
  6310. {
  6311. }
  6312. Event( std::nullptr_t ) {}
  6313. ~Event()
  6314. {
  6315. clear();
  6316. }
  6317. Event() = delete;
  6318. Event( Event const & ) = delete;
  6319. Event( Event && rhs ) VULKAN_HPP_NOEXCEPT
  6320. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6321. , m_event( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_event, {} ) )
  6322. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6323. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6324. {
  6325. }
  6326. Event & operator=( Event const & ) = delete;
Event & operator=( Event && rhs ) VULKAN_HPP_NOEXCEPT
  6328. {
  6329. if ( this != &rhs )
  6330. {
  6331. std::swap( m_device, rhs.m_device );
  6332. std::swap( m_event, rhs.m_event );
  6333. std::swap( m_allocator, rhs.m_allocator );
  6334. std::swap( m_dispatcher, rhs.m_dispatcher );
  6335. }
  6336. return *this;
  6337. }
  6338. VULKAN_HPP_NAMESPACE::Event const & operator*() const VULKAN_HPP_NOEXCEPT
  6339. {
  6340. return m_event;
  6341. }
  6342. void clear() VULKAN_HPP_NOEXCEPT
  6343. {
  6344. if ( m_event )
  6345. {
  6346. getDispatcher()->vkDestroyEvent(
  6347. static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6348. }
  6349. m_device = nullptr;
  6350. m_event = nullptr;
  6351. m_allocator = nullptr;
  6352. m_dispatcher = nullptr;
  6353. }
  6354. VULKAN_HPP_NAMESPACE::Event release()
  6355. {
  6356. m_device = nullptr;
  6357. m_allocator = nullptr;
  6358. m_dispatcher = nullptr;
  6359. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_event, nullptr );
  6360. }
  6361. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6362. {
  6363. return m_device;
  6364. }
  6365. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6366. {
  6367. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6368. return m_dispatcher;
  6369. }
  6370. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Event & rhs ) VULKAN_HPP_NOEXCEPT
  6371. {
  6372. std::swap( m_device, rhs.m_device );
  6373. std::swap( m_event, rhs.m_event );
  6374. std::swap( m_allocator, rhs.m_allocator );
  6375. std::swap( m_dispatcher, rhs.m_dispatcher );
  6376. }
  6377. //=== VK_VERSION_1_0 ===
  6378. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
  6379. void set() const;
  6380. void reset() const;
  6381. private:
  6382. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6383. VULKAN_HPP_NAMESPACE::Event m_event = {};
  6384. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6385. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6386. };
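//=== Usage sketch: Event (illustrative comments only) ===
// A minimal host-side example, assuming an existing vk::raii::Device named `device`:
//
//   vk::raii::Event event( device, vk::EventCreateInfo{} );          // vkCreateEvent
//   event.set();                                                     // vkSetEvent
//   bool signaled = ( event.getStatus() == vk::Result::eEventSet );  // vkGetEventStatus
//   event.reset();                                                   // vkResetEvent
//
// vkDestroyEvent runs automatically from clear() when `event` leaves scope.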
  6387. class Fence
  6388. {
  6389. public:
  6390. using CType = VkFence;
  6391. using CppType = vk::Fence;
  6392. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
  6393. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6394. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
  6395. public:
  6396. Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6397. VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
  6398. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6399. : m_device( *device )
  6400. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6401. , m_dispatcher( device.getDispatcher() )
  6402. {
  6403. VULKAN_HPP_NAMESPACE::Result result =
  6404. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateFence( static_cast<VkDevice>( *device ),
  6405. reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
  6406. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6407. reinterpret_cast<VkFence *>( &m_fence ) ) );
  6408. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6409. {
  6410. detail::throwResultException( result, "vkCreateFence" );
  6411. }
  6412. }
  6413. Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6414. VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
  6415. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6416. : m_device( *device )
  6417. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6418. , m_dispatcher( device.getDispatcher() )
  6419. {
  6420. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  6421. device.getDispatcher()->vkRegisterDeviceEventEXT( static_cast<VkDevice>( *device ),
  6422. reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
  6423. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6424. reinterpret_cast<VkFence *>( &m_fence ) ) );
  6425. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6426. {
  6427. detail::throwResultException( result, "vkRegisterDeviceEventEXT" );
  6428. }
  6429. }
  6430. Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6431. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
  6432. VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
  6433. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6434. : m_device( *device )
  6435. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6436. , m_dispatcher( device.getDispatcher() )
  6437. {
  6438. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  6439. device.getDispatcher()->vkRegisterDisplayEventEXT( static_cast<VkDevice>( *device ),
  6440. static_cast<VkDisplayKHR>( *display ),
  6441. reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
  6442. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6443. reinterpret_cast<VkFence *>( &m_fence ) ) );
  6444. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6445. {
  6446. detail::throwResultException( result, "vkRegisterDisplayEventEXT" );
  6447. }
  6448. }
  6449. Fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6450. VkFence fence,
  6451. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6452. : m_device( *device )
  6453. , m_fence( fence )
  6454. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6455. , m_dispatcher( device.getDispatcher() )
  6456. {
  6457. }
  6458. Fence( std::nullptr_t ) {}
  6459. ~Fence()
  6460. {
  6461. clear();
  6462. }
  6463. Fence() = delete;
  6464. Fence( Fence const & ) = delete;
  6465. Fence( Fence && rhs ) VULKAN_HPP_NOEXCEPT
  6466. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6467. , m_fence( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_fence, {} ) )
  6468. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6469. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6470. {
  6471. }
  6472. Fence & operator=( Fence const & ) = delete;
Fence & operator=( Fence && rhs ) VULKAN_HPP_NOEXCEPT
  6474. {
  6475. if ( this != &rhs )
  6476. {
  6477. std::swap( m_device, rhs.m_device );
  6478. std::swap( m_fence, rhs.m_fence );
  6479. std::swap( m_allocator, rhs.m_allocator );
  6480. std::swap( m_dispatcher, rhs.m_dispatcher );
  6481. }
  6482. return *this;
  6483. }
  6484. VULKAN_HPP_NAMESPACE::Fence const & operator*() const VULKAN_HPP_NOEXCEPT
  6485. {
  6486. return m_fence;
  6487. }
  6488. void clear() VULKAN_HPP_NOEXCEPT
  6489. {
  6490. if ( m_fence )
  6491. {
  6492. getDispatcher()->vkDestroyFence(
  6493. static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6494. }
  6495. m_device = nullptr;
  6496. m_fence = nullptr;
  6497. m_allocator = nullptr;
  6498. m_dispatcher = nullptr;
  6499. }
  6500. VULKAN_HPP_NAMESPACE::Fence release()
  6501. {
  6502. m_device = nullptr;
  6503. m_allocator = nullptr;
  6504. m_dispatcher = nullptr;
  6505. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_fence, nullptr );
  6506. }
  6507. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6508. {
  6509. return m_device;
  6510. }
  6511. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6512. {
  6513. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6514. return m_dispatcher;
  6515. }
  6516. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Fence & rhs ) VULKAN_HPP_NOEXCEPT
  6517. {
  6518. std::swap( m_device, rhs.m_device );
  6519. std::swap( m_fence, rhs.m_fence );
  6520. std::swap( m_allocator, rhs.m_allocator );
  6521. std::swap( m_dispatcher, rhs.m_dispatcher );
  6522. }
  6523. //=== VK_VERSION_1_0 ===
  6524. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
  6525. private:
  6526. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6527. VULKAN_HPP_NAMESPACE::Fence m_fence = {};
  6528. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6529. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6530. };
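//=== Usage sketch: Fence (illustrative comments only) ===
// The class exposes three creation paths: vkCreateFence, vkRegisterDeviceEventEXT and
// vkRegisterDisplayEventEXT. A hedged example of the plain path, assuming `device` is an
// existing vk::raii::Device:
//
//   vk::FenceCreateInfo fenceCreateInfo( vk::FenceCreateFlagBits::eSignaled );
//   vk::raii::Fence fence( device, fenceCreateInfo );               // vkCreateFence
//   bool signaled = ( fence.getStatus() == vk::Result::eSuccess );  // vkGetFenceStatus
//
// Waiting on and resetting fences go through the Device wrapper elsewhere in this header.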
  6531. class Framebuffer
  6532. {
  6533. public:
  6534. using CType = VkFramebuffer;
  6535. using CppType = vk::Framebuffer;
  6536. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
  6537. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6538. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
  6539. public:
  6540. Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6541. VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
  6542. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6543. : m_device( *device )
  6544. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6545. , m_dispatcher( device.getDispatcher() )
  6546. {
  6547. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  6548. device.getDispatcher()->vkCreateFramebuffer( static_cast<VkDevice>( *device ),
  6549. reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
  6550. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6551. reinterpret_cast<VkFramebuffer *>( &m_framebuffer ) ) );
  6552. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6553. {
  6554. detail::throwResultException( result, "vkCreateFramebuffer" );
  6555. }
  6556. }
  6557. Framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6558. VkFramebuffer framebuffer,
  6559. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6560. : m_device( *device )
  6561. , m_framebuffer( framebuffer )
  6562. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6563. , m_dispatcher( device.getDispatcher() )
  6564. {
  6565. }
  6566. Framebuffer( std::nullptr_t ) {}
  6567. ~Framebuffer()
  6568. {
  6569. clear();
  6570. }
  6571. Framebuffer() = delete;
  6572. Framebuffer( Framebuffer const & ) = delete;
  6573. Framebuffer( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
  6574. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6575. , m_framebuffer( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_framebuffer, {} ) )
  6576. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6577. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6578. {
  6579. }
  6580. Framebuffer & operator=( Framebuffer const & ) = delete;
Framebuffer & operator=( Framebuffer && rhs ) VULKAN_HPP_NOEXCEPT
  6582. {
  6583. if ( this != &rhs )
  6584. {
  6585. std::swap( m_device, rhs.m_device );
  6586. std::swap( m_framebuffer, rhs.m_framebuffer );
  6587. std::swap( m_allocator, rhs.m_allocator );
  6588. std::swap( m_dispatcher, rhs.m_dispatcher );
  6589. }
  6590. return *this;
  6591. }
  6592. VULKAN_HPP_NAMESPACE::Framebuffer const & operator*() const VULKAN_HPP_NOEXCEPT
  6593. {
  6594. return m_framebuffer;
  6595. }
  6596. void clear() VULKAN_HPP_NOEXCEPT
  6597. {
  6598. if ( m_framebuffer )
  6599. {
  6600. getDispatcher()->vkDestroyFramebuffer(
  6601. static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6602. }
  6603. m_device = nullptr;
  6604. m_framebuffer = nullptr;
  6605. m_allocator = nullptr;
  6606. m_dispatcher = nullptr;
  6607. }
  6608. VULKAN_HPP_NAMESPACE::Framebuffer release()
  6609. {
  6610. m_device = nullptr;
  6611. m_allocator = nullptr;
  6612. m_dispatcher = nullptr;
  6613. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_framebuffer, nullptr );
  6614. }
  6615. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6616. {
  6617. return m_device;
  6618. }
  6619. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6620. {
  6621. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6622. return m_dispatcher;
  6623. }
  6624. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Framebuffer & rhs ) VULKAN_HPP_NOEXCEPT
  6625. {
  6626. std::swap( m_device, rhs.m_device );
  6627. std::swap( m_framebuffer, rhs.m_framebuffer );
  6628. std::swap( m_allocator, rhs.m_allocator );
  6629. std::swap( m_dispatcher, rhs.m_dispatcher );
  6630. }
  6631. //=== VK_QCOM_tile_properties ===
  6632. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> getTilePropertiesQCOM() const;
  6633. private:
  6634. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6635. VULKAN_HPP_NAMESPACE::Framebuffer m_framebuffer = {};
  6636. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6637. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6638. };
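//=== Usage sketch: Framebuffer (illustrative comments only) ===
// A hedged example, assuming `device` (vk::raii::Device), `renderPass` (vk::raii::RenderPass)
// and `colorView` (vk::raii::ImageView) already exist; the extent values are placeholders.
//
//   vk::ImageView attachment = *colorView;
//   vk::FramebufferCreateInfo framebufferCreateInfo{};
//   framebufferCreateInfo.renderPass      = *renderPass;
//   framebufferCreateInfo.attachmentCount = 1;
//   framebufferCreateInfo.pAttachments    = &attachment;
//   framebufferCreateInfo.width           = 1280;
//   framebufferCreateInfo.height          = 720;
//   framebufferCreateInfo.layers          = 1;
//   vk::raii::Framebuffer framebuffer( device, framebufferCreateInfo );  // vkCreateFramebuffer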
  6639. class Image
  6640. {
  6641. public:
  6642. using CType = VkImage;
  6643. using CppType = vk::Image;
  6644. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
  6645. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6646. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
  6647. public:
  6648. Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6649. VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
  6650. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6651. : m_device( *device )
  6652. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6653. , m_dispatcher( device.getDispatcher() )
  6654. {
  6655. VULKAN_HPP_NAMESPACE::Result result =
  6656. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImage( static_cast<VkDevice>( *device ),
  6657. reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
  6658. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6659. reinterpret_cast<VkImage *>( &m_image ) ) );
  6660. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6661. {
  6662. detail::throwResultException( result, "vkCreateImage" );
  6663. }
  6664. }
  6665. Image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6666. VkImage image,
  6667. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6668. : m_device( *device )
  6669. , m_image( image )
  6670. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6671. , m_dispatcher( device.getDispatcher() )
  6672. {
  6673. }
  6674. Image( std::nullptr_t ) {}
  6675. ~Image()
  6676. {
  6677. clear();
  6678. }
  6679. Image() = delete;
  6680. Image( Image const & ) = delete;
  6681. Image( Image && rhs ) VULKAN_HPP_NOEXCEPT
  6682. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6683. , m_image( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_image, {} ) )
  6684. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6685. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6686. {
  6687. }
  6688. Image & operator=( Image const & ) = delete;
Image & operator=( Image && rhs ) VULKAN_HPP_NOEXCEPT
  6690. {
  6691. if ( this != &rhs )
  6692. {
  6693. std::swap( m_device, rhs.m_device );
  6694. std::swap( m_image, rhs.m_image );
  6695. std::swap( m_allocator, rhs.m_allocator );
  6696. std::swap( m_dispatcher, rhs.m_dispatcher );
  6697. }
  6698. return *this;
  6699. }
  6700. VULKAN_HPP_NAMESPACE::Image const & operator*() const VULKAN_HPP_NOEXCEPT
  6701. {
  6702. return m_image;
  6703. }
  6704. void clear() VULKAN_HPP_NOEXCEPT
  6705. {
  6706. if ( m_image )
  6707. {
  6708. getDispatcher()->vkDestroyImage(
  6709. static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6710. }
  6711. m_device = nullptr;
  6712. m_image = nullptr;
  6713. m_allocator = nullptr;
  6714. m_dispatcher = nullptr;
  6715. }
  6716. VULKAN_HPP_NAMESPACE::Image release()
  6717. {
  6718. m_device = nullptr;
  6719. m_allocator = nullptr;
  6720. m_dispatcher = nullptr;
  6721. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_image, nullptr );
  6722. }
  6723. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6724. {
  6725. return m_device;
  6726. }
  6727. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6728. {
  6729. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6730. return m_dispatcher;
  6731. }
  6732. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Image & rhs ) VULKAN_HPP_NOEXCEPT
  6733. {
  6734. std::swap( m_device, rhs.m_device );
  6735. std::swap( m_image, rhs.m_image );
  6736. std::swap( m_allocator, rhs.m_allocator );
  6737. std::swap( m_dispatcher, rhs.m_dispatcher );
  6738. }
  6739. //=== VK_VERSION_1_0 ===
  6740. void bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const;
  6741. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getMemoryRequirements() const VULKAN_HPP_NOEXCEPT;
  6742. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> getSparseMemoryRequirements() const;
  6743. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout
  6744. getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT;
  6745. //=== VK_EXT_image_drm_format_modifier ===
  6746. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT getDrmFormatModifierPropertiesEXT() const;
  6747. //=== VK_EXT_host_image_copy ===
  6748. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
  6749. getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
  6750. template <typename X, typename Y, typename... Z>
  6751. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  6752. getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
  6753. //=== VK_KHR_maintenance5 ===
  6754. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
  6755. getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
  6756. template <typename X, typename Y, typename... Z>
  6757. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  6758. getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT;
  6759. private:
  6760. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6761. VULKAN_HPP_NAMESPACE::Image m_image = {};
  6762. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6763. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6764. };
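//=== Usage sketch: Image (illustrative comments only) ===
// A hedged example of creating an image and binding memory to it; `device` is an existing
// vk::raii::Device and `memory` a vk::raii::DeviceMemory allocated from a type that satisfies
// the reported requirements (the allocation itself is not shown).
//
//   vk::ImageCreateInfo imageCreateInfo{};
//   imageCreateInfo.imageType   = vk::ImageType::e2D;
//   imageCreateInfo.format      = vk::Format::eR8G8B8A8Unorm;
//   imageCreateInfo.extent      = vk::Extent3D{ 256, 256, 1 };
//   imageCreateInfo.mipLevels   = 1;
//   imageCreateInfo.arrayLayers = 1;
//   imageCreateInfo.samples     = vk::SampleCountFlagBits::e1;
//   imageCreateInfo.usage       = vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst;
//   vk::raii::Image image( device, imageCreateInfo );                     // vkCreateImage
//   vk::MemoryRequirements requirements = image.getMemoryRequirements();  // vkGetImageMemoryRequirements
//   image.bindMemory( *memory, 0 );                                       // vkBindImageMemory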
  6765. class ImageView
  6766. {
  6767. public:
  6768. using CType = VkImageView;
  6769. using CppType = vk::ImageView;
  6770. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
  6771. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6772. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
  6773. public:
  6774. ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6775. VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
  6776. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6777. : m_device( *device )
  6778. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6779. , m_dispatcher( device.getDispatcher() )
  6780. {
  6781. VULKAN_HPP_NAMESPACE::Result result =
  6782. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateImageView( static_cast<VkDevice>( *device ),
  6783. reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
  6784. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6785. reinterpret_cast<VkImageView *>( &m_imageView ) ) );
  6786. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6787. {
  6788. detail::throwResultException( result, "vkCreateImageView" );
  6789. }
  6790. }
  6791. ImageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6792. VkImageView imageView,
  6793. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6794. : m_device( *device )
  6795. , m_imageView( imageView )
  6796. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6797. , m_dispatcher( device.getDispatcher() )
  6798. {
  6799. }
  6800. ImageView( std::nullptr_t ) {}
  6801. ~ImageView()
  6802. {
  6803. clear();
  6804. }
  6805. ImageView() = delete;
  6806. ImageView( ImageView const & ) = delete;
  6807. ImageView( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
  6808. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6809. , m_imageView( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_imageView, {} ) )
  6810. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6811. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6812. {
  6813. }
  6814. ImageView & operator=( ImageView const & ) = delete;
ImageView & operator=( ImageView && rhs ) VULKAN_HPP_NOEXCEPT
  6816. {
  6817. if ( this != &rhs )
  6818. {
  6819. std::swap( m_device, rhs.m_device );
  6820. std::swap( m_imageView, rhs.m_imageView );
  6821. std::swap( m_allocator, rhs.m_allocator );
  6822. std::swap( m_dispatcher, rhs.m_dispatcher );
  6823. }
  6824. return *this;
  6825. }
  6826. VULKAN_HPP_NAMESPACE::ImageView const & operator*() const VULKAN_HPP_NOEXCEPT
  6827. {
  6828. return m_imageView;
  6829. }
  6830. void clear() VULKAN_HPP_NOEXCEPT
  6831. {
  6832. if ( m_imageView )
  6833. {
  6834. getDispatcher()->vkDestroyImageView(
  6835. static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6836. }
  6837. m_device = nullptr;
  6838. m_imageView = nullptr;
  6839. m_allocator = nullptr;
  6840. m_dispatcher = nullptr;
  6841. }
  6842. VULKAN_HPP_NAMESPACE::ImageView release()
  6843. {
  6844. m_device = nullptr;
  6845. m_allocator = nullptr;
  6846. m_dispatcher = nullptr;
  6847. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_imageView, nullptr );
  6848. }
  6849. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6850. {
  6851. return m_device;
  6852. }
  6853. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6854. {
  6855. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6856. return m_dispatcher;
  6857. }
  6858. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ImageView & rhs ) VULKAN_HPP_NOEXCEPT
  6859. {
  6860. std::swap( m_device, rhs.m_device );
  6861. std::swap( m_imageView, rhs.m_imageView );
  6862. std::swap( m_allocator, rhs.m_allocator );
  6863. std::swap( m_dispatcher, rhs.m_dispatcher );
  6864. }
  6865. //=== VK_NVX_image_view_handle ===
  6866. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX getAddressNVX() const;
  6867. private:
  6868. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6869. VULKAN_HPP_NAMESPACE::ImageView m_imageView = {};
  6870. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6871. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6872. };
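//=== Usage sketch: ImageView (illustrative comments only) ===
// A hedged example building on the Image sketch above; `image` is a vk::raii::Image with a
// matching format.
//
//   vk::ImageViewCreateInfo viewCreateInfo{};
//   viewCreateInfo.image            = *image;
//   viewCreateInfo.viewType         = vk::ImageViewType::e2D;
//   viewCreateInfo.format           = vk::Format::eR8G8B8A8Unorm;
//   viewCreateInfo.subresourceRange = vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 );
//   vk::raii::ImageView imageView( device, viewCreateInfo );  // vkCreateImageView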
  6873. class IndirectCommandsLayoutNV
  6874. {
  6875. public:
  6876. using CType = VkIndirectCommandsLayoutNV;
  6877. using CppType = vk::IndirectCommandsLayoutNV;
  6878. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
  6879. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6880. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  6881. public:
  6882. IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6883. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
  6884. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6885. : m_device( *device )
  6886. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6887. , m_dispatcher( device.getDispatcher() )
  6888. {
  6889. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  6890. device.getDispatcher()->vkCreateIndirectCommandsLayoutNV( static_cast<VkDevice>( *device ),
  6891. reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
  6892. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  6893. reinterpret_cast<VkIndirectCommandsLayoutNV *>( &m_indirectCommandsLayout ) ) );
  6894. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  6895. {
  6896. detail::throwResultException( result, "vkCreateIndirectCommandsLayoutNV" );
  6897. }
  6898. }
  6899. IndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6900. VkIndirectCommandsLayoutNV indirectCommandsLayout,
  6901. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6902. : m_device( *device )
  6903. , m_indirectCommandsLayout( indirectCommandsLayout )
  6904. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6905. , m_dispatcher( device.getDispatcher() )
  6906. {
  6907. }
  6908. IndirectCommandsLayoutNV( std::nullptr_t ) {}
  6909. ~IndirectCommandsLayoutNV()
  6910. {
  6911. clear();
  6912. }
  6913. IndirectCommandsLayoutNV() = delete;
  6914. IndirectCommandsLayoutNV( IndirectCommandsLayoutNV const & ) = delete;
  6915. IndirectCommandsLayoutNV( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
  6916. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  6917. , m_indirectCommandsLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_indirectCommandsLayout, {} ) )
  6918. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  6919. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  6920. {
  6921. }
  6922. IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV const & ) = delete;
IndirectCommandsLayoutNV & operator=( IndirectCommandsLayoutNV && rhs ) VULKAN_HPP_NOEXCEPT
  6924. {
  6925. if ( this != &rhs )
  6926. {
  6927. std::swap( m_device, rhs.m_device );
  6928. std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
  6929. std::swap( m_allocator, rhs.m_allocator );
  6930. std::swap( m_dispatcher, rhs.m_dispatcher );
  6931. }
  6932. return *this;
  6933. }
  6934. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const & operator*() const VULKAN_HPP_NOEXCEPT
  6935. {
  6936. return m_indirectCommandsLayout;
  6937. }
  6938. void clear() VULKAN_HPP_NOEXCEPT
  6939. {
  6940. if ( m_indirectCommandsLayout )
  6941. {
  6942. getDispatcher()->vkDestroyIndirectCommandsLayoutNV( static_cast<VkDevice>( m_device ),
  6943. static_cast<VkIndirectCommandsLayoutNV>( m_indirectCommandsLayout ),
  6944. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  6945. }
  6946. m_device = nullptr;
  6947. m_indirectCommandsLayout = nullptr;
  6948. m_allocator = nullptr;
  6949. m_dispatcher = nullptr;
  6950. }
  6951. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV release()
  6952. {
  6953. m_device = nullptr;
  6954. m_allocator = nullptr;
  6955. m_dispatcher = nullptr;
  6956. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_indirectCommandsLayout, nullptr );
  6957. }
  6958. VULKAN_HPP_NAMESPACE::Device getDevice() const
  6959. {
  6960. return m_device;
  6961. }
  6962. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  6963. {
  6964. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  6965. return m_dispatcher;
  6966. }
  6967. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV & rhs ) VULKAN_HPP_NOEXCEPT
  6968. {
  6969. std::swap( m_device, rhs.m_device );
  6970. std::swap( m_indirectCommandsLayout, rhs.m_indirectCommandsLayout );
  6971. std::swap( m_allocator, rhs.m_allocator );
  6972. std::swap( m_dispatcher, rhs.m_dispatcher );
  6973. }
  6974. private:
  6975. VULKAN_HPP_NAMESPACE::Device m_device = {};
  6976. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV m_indirectCommandsLayout = {};
  6977. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  6978. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  6979. };
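//=== Usage sketch: IndirectCommandsLayoutNV (illustrative comments only) ===
// Requires the VK_NV_device_generated_commands extension. Filling the token array is heavily
// application-specific and is elided here; `tokens` (a std::vector of
// vk::IndirectCommandsLayoutTokenNV) and `streamStride` (uint32_t) are placeholders.
//
//   vk::IndirectCommandsLayoutCreateInfoNV layoutCreateInfo{};
//   layoutCreateInfo.pipelineBindPoint = vk::PipelineBindPoint::eGraphics;
//   layoutCreateInfo.tokenCount        = static_cast<uint32_t>( tokens.size() );
//   layoutCreateInfo.pTokens           = tokens.data();
//   layoutCreateInfo.streamCount       = 1;
//   layoutCreateInfo.pStreamStrides    = &streamStride;
//   vk::raii::IndirectCommandsLayoutNV indirectCommandsLayout( device, layoutCreateInfo );  // vkCreateIndirectCommandsLayoutNV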
  6980. class MicromapEXT
  6981. {
  6982. public:
  6983. using CType = VkMicromapEXT;
  6984. using CppType = vk::MicromapEXT;
  6985. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eMicromapEXT;
  6986. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  6987. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  6988. public:
  6989. MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  6990. VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
  6991. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  6992. : m_device( *device )
  6993. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  6994. , m_dispatcher( device.getDispatcher() )
  6995. {
  6996. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  6997. device.getDispatcher()->vkCreateMicromapEXT( static_cast<VkDevice>( *device ),
  6998. reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
  6999. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7000. reinterpret_cast<VkMicromapEXT *>( &m_micromap ) ) );
  7001. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7002. {
  7003. detail::throwResultException( result, "vkCreateMicromapEXT" );
  7004. }
  7005. }
  7006. MicromapEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7007. VkMicromapEXT micromap,
  7008. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7009. : m_device( *device )
  7010. , m_micromap( micromap )
  7011. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7012. , m_dispatcher( device.getDispatcher() )
  7013. {
  7014. }
  7015. MicromapEXT( std::nullptr_t ) {}
  7016. ~MicromapEXT()
  7017. {
  7018. clear();
  7019. }
  7020. MicromapEXT() = delete;
  7021. MicromapEXT( MicromapEXT const & ) = delete;
  7022. MicromapEXT( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
  7023. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7024. , m_micromap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_micromap, {} ) )
  7025. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7026. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7027. {
  7028. }
  7029. MicromapEXT & operator=( MicromapEXT const & ) = delete;
MicromapEXT & operator=( MicromapEXT && rhs ) VULKAN_HPP_NOEXCEPT
  7031. {
  7032. if ( this != &rhs )
  7033. {
  7034. std::swap( m_device, rhs.m_device );
  7035. std::swap( m_micromap, rhs.m_micromap );
  7036. std::swap( m_allocator, rhs.m_allocator );
  7037. std::swap( m_dispatcher, rhs.m_dispatcher );
  7038. }
  7039. return *this;
  7040. }
  7041. VULKAN_HPP_NAMESPACE::MicromapEXT const & operator*() const VULKAN_HPP_NOEXCEPT
  7042. {
  7043. return m_micromap;
  7044. }
  7045. void clear() VULKAN_HPP_NOEXCEPT
  7046. {
  7047. if ( m_micromap )
  7048. {
  7049. getDispatcher()->vkDestroyMicromapEXT(
  7050. static_cast<VkDevice>( m_device ), static_cast<VkMicromapEXT>( m_micromap ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7051. }
  7052. m_device = nullptr;
  7053. m_micromap = nullptr;
  7054. m_allocator = nullptr;
  7055. m_dispatcher = nullptr;
  7056. }
  7057. VULKAN_HPP_NAMESPACE::MicromapEXT release()
  7058. {
  7059. m_device = nullptr;
  7060. m_allocator = nullptr;
  7061. m_dispatcher = nullptr;
  7062. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_micromap, nullptr );
  7063. }
  7064. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7065. {
  7066. return m_device;
  7067. }
  7068. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7069. {
  7070. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7071. return m_dispatcher;
  7072. }
  7073. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::MicromapEXT & rhs ) VULKAN_HPP_NOEXCEPT
  7074. {
  7075. std::swap( m_device, rhs.m_device );
  7076. std::swap( m_micromap, rhs.m_micromap );
  7077. std::swap( m_allocator, rhs.m_allocator );
  7078. std::swap( m_dispatcher, rhs.m_dispatcher );
  7079. }
  7080. private:
  7081. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7082. VULKAN_HPP_NAMESPACE::MicromapEXT m_micromap = {};
  7083. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7084. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7085. };
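//=== Usage sketch: MicromapEXT (illustrative comments only) ===
// Requires VK_EXT_opacity_micromap. The micromap lives in caller-provided buffer storage, so
// `storageBuffer` (a vk::raii::Buffer created with eMicromapStorageEXT usage) and
// `storageSize` (for example queried via vkGetMicromapBuildSizesEXT) are assumed to exist.
//
//   vk::MicromapCreateInfoEXT micromapCreateInfo{};
//   micromapCreateInfo.buffer = *storageBuffer;
//   micromapCreateInfo.size   = storageSize;
//   micromapCreateInfo.type   = vk::MicromapTypeEXT::eOpacityMicromap;
//   vk::raii::MicromapEXT micromap( device, micromapCreateInfo );  // vkCreateMicromapEXT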
  7086. class OpticalFlowSessionNV
  7087. {
  7088. public:
  7089. using CType = VkOpticalFlowSessionNV;
  7090. using CppType = vk::OpticalFlowSessionNV;
  7091. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eOpticalFlowSessionNV;
  7092. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7093. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  7094. public:
  7095. OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7096. VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
  7097. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7098. : m_device( *device )
  7099. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7100. , m_dispatcher( device.getDispatcher() )
  7101. {
  7102. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7103. device.getDispatcher()->vkCreateOpticalFlowSessionNV( static_cast<VkDevice>( *device ),
  7104. reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
  7105. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7106. reinterpret_cast<VkOpticalFlowSessionNV *>( &m_session ) ) );
  7107. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7108. {
  7109. detail::throwResultException( result, "vkCreateOpticalFlowSessionNV" );
  7110. }
  7111. }
  7112. OpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7113. VkOpticalFlowSessionNV session,
  7114. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7115. : m_device( *device )
  7116. , m_session( session )
  7117. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7118. , m_dispatcher( device.getDispatcher() )
  7119. {
  7120. }
  7121. OpticalFlowSessionNV( std::nullptr_t ) {}
  7122. ~OpticalFlowSessionNV()
  7123. {
  7124. clear();
  7125. }
  7126. OpticalFlowSessionNV() = delete;
  7127. OpticalFlowSessionNV( OpticalFlowSessionNV const & ) = delete;
  7128. OpticalFlowSessionNV( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
  7129. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7130. , m_session( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_session, {} ) )
  7131. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7132. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7133. {
  7134. }
  7135. OpticalFlowSessionNV & operator=( OpticalFlowSessionNV const & ) = delete;
OpticalFlowSessionNV & operator=( OpticalFlowSessionNV && rhs ) VULKAN_HPP_NOEXCEPT
  7137. {
  7138. if ( this != &rhs )
  7139. {
  7140. std::swap( m_device, rhs.m_device );
  7141. std::swap( m_session, rhs.m_session );
  7142. std::swap( m_allocator, rhs.m_allocator );
  7143. std::swap( m_dispatcher, rhs.m_dispatcher );
  7144. }
  7145. return *this;
  7146. }
  7147. VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV const & operator*() const VULKAN_HPP_NOEXCEPT
  7148. {
  7149. return m_session;
  7150. }
  7151. void clear() VULKAN_HPP_NOEXCEPT
  7152. {
  7153. if ( m_session )
  7154. {
  7155. getDispatcher()->vkDestroyOpticalFlowSessionNV( static_cast<VkDevice>( m_device ),
  7156. static_cast<VkOpticalFlowSessionNV>( m_session ),
  7157. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7158. }
  7159. m_device = nullptr;
  7160. m_session = nullptr;
  7161. m_allocator = nullptr;
  7162. m_dispatcher = nullptr;
  7163. }
  7164. VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV release()
  7165. {
  7166. m_device = nullptr;
  7167. m_allocator = nullptr;
  7168. m_dispatcher = nullptr;
  7169. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_session, nullptr );
  7170. }
  7171. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7172. {
  7173. return m_device;
  7174. }
  7175. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7176. {
  7177. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7178. return m_dispatcher;
  7179. }
  7180. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV & rhs ) VULKAN_HPP_NOEXCEPT
  7181. {
  7182. std::swap( m_device, rhs.m_device );
  7183. std::swap( m_session, rhs.m_session );
  7184. std::swap( m_allocator, rhs.m_allocator );
  7185. std::swap( m_dispatcher, rhs.m_dispatcher );
  7186. }
  7187. //=== VK_NV_optical_flow ===
  7188. void bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
  7189. VULKAN_HPP_NAMESPACE::ImageView view,
  7190. VULKAN_HPP_NAMESPACE::ImageLayout layout ) const;
  7191. private:
  7192. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7193. VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV m_session = {};
  7194. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7195. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7196. };
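//=== Usage sketch: OpticalFlowSessionNV (illustrative comments only) ===
// Requires VK_NV_optical_flow. A hedged example; the extent, format and `inputView`
// (a vk::ImageView over a suitably created image) are placeholders.
//
//   vk::OpticalFlowSessionCreateInfoNV sessionCreateInfo{};
//   sessionCreateInfo.width       = 1024;
//   sessionCreateInfo.height      = 768;
//   sessionCreateInfo.imageFormat = vk::Format::eR8Unorm;
//   vk::raii::OpticalFlowSessionNV session( device, sessionCreateInfo );  // vkCreateOpticalFlowSessionNV
//   session.bindImage( vk::OpticalFlowSessionBindingPointNV::eInput, inputView, vk::ImageLayout::eGeneral );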
  7197. class PerformanceConfigurationINTEL
  7198. {
  7199. public:
  7200. using CType = VkPerformanceConfigurationINTEL;
  7201. using CppType = vk::PerformanceConfigurationINTEL;
  7202. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
  7203. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7204. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  7205. public:
  7206. PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7207. VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo )
  7208. : m_device( *device ), m_dispatcher( device.getDispatcher() )
  7209. {
  7210. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7211. device.getDispatcher()->vkAcquirePerformanceConfigurationINTEL( static_cast<VkDevice>( *device ),
  7212. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
  7213. reinterpret_cast<VkPerformanceConfigurationINTEL *>( &m_configuration ) ) );
  7214. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7215. {
  7216. detail::throwResultException( result, "vkAcquirePerformanceConfigurationINTEL" );
  7217. }
  7218. }
  7219. PerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkPerformanceConfigurationINTEL configuration )
  7220. : m_device( *device ), m_configuration( configuration ), m_dispatcher( device.getDispatcher() )
  7221. {
  7222. }
  7223. PerformanceConfigurationINTEL( std::nullptr_t ) {}
  7224. ~PerformanceConfigurationINTEL()
  7225. {
  7226. clear();
  7227. }
  7228. PerformanceConfigurationINTEL() = delete;
  7229. PerformanceConfigurationINTEL( PerformanceConfigurationINTEL const & ) = delete;
  7230. PerformanceConfigurationINTEL( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
  7231. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7232. , m_configuration( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_configuration, {} ) )
  7233. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7234. {
  7235. }
  7236. PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL const & ) = delete;
PerformanceConfigurationINTEL & operator=( PerformanceConfigurationINTEL && rhs ) VULKAN_HPP_NOEXCEPT
  7238. {
  7239. if ( this != &rhs )
  7240. {
  7241. std::swap( m_device, rhs.m_device );
  7242. std::swap( m_configuration, rhs.m_configuration );
  7243. std::swap( m_dispatcher, rhs.m_dispatcher );
  7244. }
  7245. return *this;
  7246. }
  7247. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const & operator*() const VULKAN_HPP_NOEXCEPT
  7248. {
  7249. return m_configuration;
  7250. }
  7251. void clear() VULKAN_HPP_NOEXCEPT
  7252. {
  7253. if ( m_configuration )
  7254. {
  7255. getDispatcher()->vkReleasePerformanceConfigurationINTEL( static_cast<VkDevice>( m_device ),
  7256. static_cast<VkPerformanceConfigurationINTEL>( m_configuration ) );
  7257. }
  7258. m_device = nullptr;
  7259. m_configuration = nullptr;
  7260. m_dispatcher = nullptr;
  7261. }
  7262. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL release()
  7263. {
  7264. m_device = nullptr;
  7265. m_dispatcher = nullptr;
  7266. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_configuration, nullptr );
  7267. }
  7268. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7269. {
  7270. return m_device;
  7271. }
  7272. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7273. {
  7274. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7275. return m_dispatcher;
  7276. }
  7277. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL & rhs ) VULKAN_HPP_NOEXCEPT
  7278. {
  7279. std::swap( m_device, rhs.m_device );
  7280. std::swap( m_configuration, rhs.m_configuration );
  7281. std::swap( m_dispatcher, rhs.m_dispatcher );
  7282. }
  7283. private:
  7284. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7285. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL m_configuration = {};
  7286. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7287. };
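// Usage sketch (editorial comment, illustrative only - not generated code): assumes the default
// vk::raii namespace, an already constructed vk::raii::Device named `device` with
// VK_INTEL_performance_query enabled, and a vk::raii::Queue named `queue` (see the Queue wrapper below).
//
//   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo{};  // set acquireInfo.type as required
//   vk::raii::PerformanceConfigurationINTEL configuration( device, acquireInfo );
//   queue.setPerformanceConfigurationINTEL( *configuration );
//
// The destructor / clear() returns the handle via vkReleasePerformanceConfigurationINTEL,
// unless release() was called first to transfer ownership back to the caller.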
  7288. class PipelineCache
  7289. {
  7290. public:
  7291. using CType = VkPipelineCache;
  7292. using CppType = vk::PipelineCache;
  7293. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
  7294. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7295. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
  7296. public:
  7297. PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7298. VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
  7299. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7300. : m_device( *device )
  7301. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7302. , m_dispatcher( device.getDispatcher() )
  7303. {
  7304. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7305. device.getDispatcher()->vkCreatePipelineCache( static_cast<VkDevice>( *device ),
  7306. reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
  7307. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7308. reinterpret_cast<VkPipelineCache *>( &m_pipelineCache ) ) );
  7309. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7310. {
  7311. detail::throwResultException( result, "vkCreatePipelineCache" );
  7312. }
  7313. }
  7314. PipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7315. VkPipelineCache pipelineCache,
  7316. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7317. : m_device( *device )
  7318. , m_pipelineCache( pipelineCache )
  7319. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7320. , m_dispatcher( device.getDispatcher() )
  7321. {
  7322. }
  7323. PipelineCache( std::nullptr_t ) {}
  7324. ~PipelineCache()
  7325. {
  7326. clear();
  7327. }
  7328. PipelineCache() = delete;
  7329. PipelineCache( PipelineCache const & ) = delete;
  7330. PipelineCache( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
  7331. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7332. , m_pipelineCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineCache, {} ) )
  7333. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7334. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7335. {
  7336. }
  7337. PipelineCache & operator=( PipelineCache const & ) = delete;
  7338. PipelineCache & operator =( PipelineCache && rhs ) VULKAN_HPP_NOEXCEPT
  7339. {
  7340. if ( this != &rhs )
  7341. {
  7342. std::swap( m_device, rhs.m_device );
  7343. std::swap( m_pipelineCache, rhs.m_pipelineCache );
  7344. std::swap( m_allocator, rhs.m_allocator );
  7345. std::swap( m_dispatcher, rhs.m_dispatcher );
  7346. }
  7347. return *this;
  7348. }
  7349. VULKAN_HPP_NAMESPACE::PipelineCache const & operator*() const VULKAN_HPP_NOEXCEPT
  7350. {
  7351. return m_pipelineCache;
  7352. }
  7353. void clear() VULKAN_HPP_NOEXCEPT
  7354. {
  7355. if ( m_pipelineCache )
  7356. {
  7357. getDispatcher()->vkDestroyPipelineCache( static_cast<VkDevice>( m_device ),
  7358. static_cast<VkPipelineCache>( m_pipelineCache ),
  7359. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7360. }
  7361. m_device = nullptr;
  7362. m_pipelineCache = nullptr;
  7363. m_allocator = nullptr;
  7364. m_dispatcher = nullptr;
  7365. }
  7366. VULKAN_HPP_NAMESPACE::PipelineCache release()
  7367. {
  7368. m_device = nullptr;
  7369. m_allocator = nullptr;
  7370. m_dispatcher = nullptr;
  7371. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineCache, nullptr );
  7372. }
  7373. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7374. {
  7375. return m_device;
  7376. }
  7377. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7378. {
  7379. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7380. return m_dispatcher;
  7381. }
  7382. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache & rhs ) VULKAN_HPP_NOEXCEPT
  7383. {
  7384. std::swap( m_device, rhs.m_device );
  7385. std::swap( m_pipelineCache, rhs.m_pipelineCache );
  7386. std::swap( m_allocator, rhs.m_allocator );
  7387. std::swap( m_dispatcher, rhs.m_dispatcher );
  7388. }
  7389. //=== VK_VERSION_1_0 ===
  7390. VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
  7391. void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const;
  7392. private:
  7393. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7394. VULKAN_HPP_NAMESPACE::PipelineCache m_pipelineCache = {};
  7395. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7396. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7397. };
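// Usage sketch (editorial comment, illustrative only - not generated code): assumes the default
// vk::raii namespace and an existing vk::raii::Device named `device`; `otherCache` is a
// hypothetical second cache.
//
//   vk::raii::PipelineCache cache( device, vk::PipelineCacheCreateInfo{} );
//   std::vector<uint8_t> blob = cache.getData();  // opaque blob, e.g. to persist to disk
//   cache.merge( { *otherCache } );               // fold another cache's entries into this one
//
// vkDestroyPipelineCache is issued from clear() / the destructor unless release() is used first.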
  7398. class Pipeline
  7399. {
  7400. public:
  7401. using CType = VkPipeline;
  7402. using CppType = vk::Pipeline;
  7403. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
  7404. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7405. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
  7406. public:
  7407. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7408. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7409. VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
  7410. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7411. : m_device( *device )
  7412. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7413. , m_dispatcher( device.getDispatcher() )
  7414. {
  7415. m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7416. getDispatcher()->vkCreateComputePipelines( static_cast<VkDevice>( *device ),
  7417. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7418. 1,
  7419. reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
  7420. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7421. reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
  7422. if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
  7423. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7424. {
  7425. detail::throwResultException( m_constructorSuccessCode, "vkCreateComputePipelines" );
  7426. }
  7427. }
  7428. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  7429. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7430. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7431. VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
  7432. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7433. : m_device( *device )
  7434. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7435. , m_dispatcher( device.getDispatcher() )
  7436. {
  7437. m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7438. getDispatcher()->vkCreateExecutionGraphPipelinesAMDX( static_cast<VkDevice>( *device ),
  7439. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7440. 1,
  7441. reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
  7442. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7443. reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
  7444. if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
  7445. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7446. {
  7447. detail::throwResultException( m_constructorSuccessCode, "vkCreateExecutionGraphPipelinesAMDX" );
  7448. }
  7449. }
  7450. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  7451. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7452. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7453. VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
  7454. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7455. : m_device( *device )
  7456. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7457. , m_dispatcher( device.getDispatcher() )
  7458. {
  7459. m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7460. getDispatcher()->vkCreateGraphicsPipelines( static_cast<VkDevice>( *device ),
  7461. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7462. 1,
  7463. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
  7464. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7465. reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
  7466. if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
  7467. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7468. {
  7469. detail::throwResultException( m_constructorSuccessCode, "vkCreateGraphicsPipelines" );
  7470. }
  7471. }
  7472. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7473. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
  7474. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7475. VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
  7476. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7477. : m_device( *device )
  7478. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7479. , m_dispatcher( device.getDispatcher() )
  7480. {
  7481. m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7482. getDispatcher()->vkCreateRayTracingPipelinesKHR( static_cast<VkDevice>( *device ),
  7483. deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
  7484. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7485. 1,
  7486. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
  7487. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7488. reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
  7489. if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
  7490. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) &&
  7491. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) &&
  7492. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7493. {
  7494. detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesKHR" );
  7495. }
  7496. }
  7497. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7498. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7499. VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
  7500. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7501. : m_device( *device )
  7502. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7503. , m_dispatcher( device.getDispatcher() )
  7504. {
  7505. m_constructorSuccessCode = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7506. getDispatcher()->vkCreateRayTracingPipelinesNV( static_cast<VkDevice>( *device ),
  7507. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7508. 1,
  7509. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
  7510. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7511. reinterpret_cast<VkPipeline *>( &m_pipeline ) ) );
  7512. if ( ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::eSuccess ) &&
  7513. ( m_constructorSuccessCode != VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7514. {
  7515. detail::throwResultException( m_constructorSuccessCode, "vkCreateRayTracingPipelinesNV" );
  7516. }
  7517. }
  7518. Pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7519. VkPipeline pipeline,
  7520. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr,
  7521. VULKAN_HPP_NAMESPACE::Result successCode = VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7522. : m_device( *device )
  7523. , m_pipeline( pipeline )
  7524. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7525. , m_constructorSuccessCode( successCode )
  7526. , m_dispatcher( device.getDispatcher() )
  7527. {
  7528. }
  7529. Pipeline( std::nullptr_t ) {}
  7530. ~Pipeline()
  7531. {
  7532. clear();
  7533. }
  7534. Pipeline() = delete;
  7535. Pipeline( Pipeline const & ) = delete;
  7536. Pipeline( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
  7537. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7538. , m_pipeline( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipeline, {} ) )
  7539. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7540. , m_constructorSuccessCode( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_constructorSuccessCode, {} ) )
  7541. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7542. {
  7543. }
  7544. Pipeline & operator=( Pipeline const & ) = delete;
  7545. Pipeline & operator =( Pipeline && rhs ) VULKAN_HPP_NOEXCEPT
  7546. {
  7547. if ( this != &rhs )
  7548. {
  7549. std::swap( m_device, rhs.m_device );
  7550. std::swap( m_pipeline, rhs.m_pipeline );
  7551. std::swap( m_allocator, rhs.m_allocator );
  7552. std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
  7553. std::swap( m_dispatcher, rhs.m_dispatcher );
  7554. }
  7555. return *this;
  7556. }
  7557. VULKAN_HPP_NAMESPACE::Pipeline const & operator*() const VULKAN_HPP_NOEXCEPT
  7558. {
  7559. return m_pipeline;
  7560. }
  7561. void clear() VULKAN_HPP_NOEXCEPT
  7562. {
  7563. if ( m_pipeline )
  7564. {
  7565. getDispatcher()->vkDestroyPipeline(
  7566. static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7567. }
  7568. m_device = nullptr;
  7569. m_pipeline = nullptr;
  7570. m_allocator = nullptr;
  7571. m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
  7572. m_dispatcher = nullptr;
  7573. }
  7574. VULKAN_HPP_NAMESPACE::Pipeline release()
  7575. {
  7576. m_device = nullptr;
  7577. m_allocator = nullptr;
  7578. m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
  7579. m_dispatcher = nullptr;
  7580. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipeline, nullptr );
  7581. }
  7582. VULKAN_HPP_NAMESPACE::Result getConstructorSuccessCode() const
  7583. {
  7584. return m_constructorSuccessCode;
  7585. }
  7586. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7587. {
  7588. return m_device;
  7589. }
  7590. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7591. {
  7592. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7593. return m_dispatcher;
  7594. }
  7595. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline & rhs ) VULKAN_HPP_NOEXCEPT
  7596. {
  7597. std::swap( m_device, rhs.m_device );
  7598. std::swap( m_pipeline, rhs.m_pipeline );
  7599. std::swap( m_allocator, rhs.m_allocator );
  7600. std::swap( m_constructorSuccessCode, rhs.m_constructorSuccessCode );
  7601. std::swap( m_dispatcher, rhs.m_dispatcher );
  7602. }
  7603. //=== VK_AMD_shader_info ===
  7604. VULKAN_HPP_NODISCARD std::vector<uint8_t> getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
  7605. VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const;
  7606. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  7607. //=== VK_AMDX_shader_enqueue ===
  7608. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX getExecutionGraphScratchSizeAMDX() const;
  7609. VULKAN_HPP_NODISCARD uint32_t getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const;
  7610. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  7611. //=== VK_KHR_ray_tracing_pipeline ===
  7612. template <typename DataType>
  7613. VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
  7614. template <typename DataType>
  7615. VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
  7616. template <typename DataType>
  7617. VULKAN_HPP_NODISCARD std::vector<DataType>
  7618. getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
  7619. template <typename DataType>
  7620. VULKAN_HPP_NODISCARD DataType getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const;
  7621. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize
  7622. getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT;
  7623. //=== VK_NV_ray_tracing ===
  7624. template <typename DataType>
  7625. VULKAN_HPP_NODISCARD std::vector<DataType> getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const;
  7626. template <typename DataType>
  7627. VULKAN_HPP_NODISCARD DataType getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const;
  7628. void compileDeferredNV( uint32_t shader ) const;
  7629. private:
  7630. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7631. VULKAN_HPP_NAMESPACE::Pipeline m_pipeline = {};
  7632. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7633. VULKAN_HPP_NAMESPACE::Result m_constructorSuccessCode = VULKAN_HPP_NAMESPACE::Result::eErrorUnknown;
  7634. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7635. };
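// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device and `createInfo` stands for a fully populated vk::GraphicsPipelineCreateInfo
// (stages, layout, renderPass, ...), which is elided here.
//
//   vk::raii::Pipeline pipeline( device, nullptr /* no pipeline cache */, createInfo );
//   if ( pipeline.getConstructorSuccessCode() == vk::Result::ePipelineCompileRequiredEXT )
//   {
//     // a non-throwing success code was returned; handle the "compile required" case here
//   }
//
// Unlike most wrappers, Pipeline keeps the vk::Result of its create call, because
// vkCreate*Pipelines can legitimately return more than one success code.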
  7636. class Pipelines : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Pipeline>
  7637. {
  7638. public:
  7639. Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7640. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7641. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
  7642. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7643. {
  7644. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  7645. std::vector<VkPipeline> pipelines( createInfos.size() );
  7646. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateComputePipelines(
  7647. static_cast<VkDevice>( *device ),
  7648. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7649. createInfos.size(),
  7650. reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
  7651. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7652. pipelines.data() ) );
  7653. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7654. {
  7655. this->reserve( createInfos.size() );
  7656. for ( auto const & pipeline : pipelines )
  7657. {
  7658. this->emplace_back( device, pipeline, allocator, result );
  7659. }
  7660. }
  7661. else
  7662. {
  7663. detail::throwResultException( result, "vkCreateComputePipelines" );
  7664. }
  7665. }
  7666. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  7667. Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7668. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7669. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
  7670. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7671. {
  7672. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  7673. std::vector<VkPipeline> pipelines( createInfos.size() );
  7674. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateExecutionGraphPipelinesAMDX(
  7675. static_cast<VkDevice>( *device ),
  7676. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7677. createInfos.size(),
  7678. reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
  7679. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7680. pipelines.data() ) );
  7681. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7682. {
  7683. this->reserve( createInfos.size() );
  7684. for ( auto const & pipeline : pipelines )
  7685. {
  7686. this->emplace_back( device, pipeline, allocator, result );
  7687. }
  7688. }
  7689. else
  7690. {
  7691. detail::throwResultException( result, "vkCreateExecutionGraphPipelinesAMDX" );
  7692. }
  7693. }
  7694. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  7695. Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7696. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7697. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
  7698. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7699. {
  7700. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  7701. std::vector<VkPipeline> pipelines( createInfos.size() );
  7702. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateGraphicsPipelines(
  7703. static_cast<VkDevice>( *device ),
  7704. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7705. createInfos.size(),
  7706. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
  7707. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7708. pipelines.data() ) );
  7709. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7710. {
  7711. this->reserve( createInfos.size() );
  7712. for ( auto const & pipeline : pipelines )
  7713. {
  7714. this->emplace_back( device, pipeline, allocator, result );
  7715. }
  7716. }
  7717. else
  7718. {
  7719. detail::throwResultException( result, "vkCreateGraphicsPipelines" );
  7720. }
  7721. }
  7722. Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7723. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
  7724. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7725. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  7726. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7727. {
  7728. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  7729. std::vector<VkPipeline> pipelines( createInfos.size() );
  7730. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesKHR(
  7731. static_cast<VkDevice>( *device ),
  7732. deferredOperation ? static_cast<VkDeferredOperationKHR>( **deferredOperation ) : 0,
  7733. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7734. createInfos.size(),
  7735. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
  7736. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7737. pipelines.data() ) );
  7738. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR ) ||
  7739. ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7740. {
  7741. this->reserve( createInfos.size() );
  7742. for ( auto const & pipeline : pipelines )
  7743. {
  7744. this->emplace_back( device, pipeline, allocator, result );
  7745. }
  7746. }
  7747. else
  7748. {
  7749. detail::throwResultException( result, "vkCreateRayTracingPipelinesKHR" );
  7750. }
  7751. }
  7752. Pipelines( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7753. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  7754. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  7755. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7756. {
  7757. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  7758. std::vector<VkPipeline> pipelines( createInfos.size() );
  7759. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateRayTracingPipelinesNV(
  7760. static_cast<VkDevice>( *device ),
  7761. pipelineCache ? static_cast<VkPipelineCache>( **pipelineCache ) : 0,
  7762. createInfos.size(),
  7763. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  7764. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7765. pipelines.data() ) );
  7766. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  7767. {
  7768. this->reserve( createInfos.size() );
  7769. for ( auto const & pipeline : pipelines )
  7770. {
  7771. this->emplace_back( device, pipeline, allocator, result );
  7772. }
  7773. }
  7774. else
  7775. {
  7776. detail::throwResultException( result, "vkCreateRayTracingPipelinesNV" );
  7777. }
  7778. }
  7779. Pipelines( std::nullptr_t ) {}
  7780. Pipelines() = delete;
  7781. Pipelines( Pipelines const & ) = delete;
  7782. Pipelines( Pipelines && rhs ) = default;
  7783. Pipelines & operator=( Pipelines const & ) = delete;
  7784. Pipelines & operator=( Pipelines && rhs ) = default;
  7785. };
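// Usage sketch (editorial comment, illustrative only - not generated code): Pipelines derives from
// std::vector<vk::raii::Pipeline> and wraps one batched vkCreate*Pipelines call. `device`,
// `cache` (a vk::raii::PipelineCache), and the create infos are assumed to exist.
//
//   std::array<vk::ComputePipelineCreateInfo, 2> createInfos = { /* ... */ };
//   vk::raii::Pipelines pipelines( device, cache, createInfos );
//   vk::raii::Pipeline & first = pipelines[0];  // plain std::vector access
//
// Every element stores the shared vk::Result of the batch via the successCode constructor argument.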
  7786. class PipelineLayout
  7787. {
  7788. public:
  7789. using CType = VkPipelineLayout;
  7790. using CppType = vk::PipelineLayout;
  7791. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
  7792. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7793. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
  7794. public:
  7795. PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7796. VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
  7797. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7798. : m_device( *device )
  7799. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7800. , m_dispatcher( device.getDispatcher() )
  7801. {
  7802. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7803. device.getDispatcher()->vkCreatePipelineLayout( static_cast<VkDevice>( *device ),
  7804. reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
  7805. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7806. reinterpret_cast<VkPipelineLayout *>( &m_pipelineLayout ) ) );
  7807. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7808. {
  7809. detail::throwResultException( result, "vkCreatePipelineLayout" );
  7810. }
  7811. }
  7812. PipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7813. VkPipelineLayout pipelineLayout,
  7814. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7815. : m_device( *device )
  7816. , m_pipelineLayout( pipelineLayout )
  7817. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7818. , m_dispatcher( device.getDispatcher() )
  7819. {
  7820. }
  7821. PipelineLayout( std::nullptr_t ) {}
  7822. ~PipelineLayout()
  7823. {
  7824. clear();
  7825. }
  7826. PipelineLayout() = delete;
  7827. PipelineLayout( PipelineLayout const & ) = delete;
  7828. PipelineLayout( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
  7829. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7830. , m_pipelineLayout( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_pipelineLayout, {} ) )
  7831. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7832. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7833. {
  7834. }
  7835. PipelineLayout & operator=( PipelineLayout const & ) = delete;
  7836. PipelineLayout & operator =( PipelineLayout && rhs ) VULKAN_HPP_NOEXCEPT
  7837. {
  7838. if ( this != &rhs )
  7839. {
  7840. std::swap( m_device, rhs.m_device );
  7841. std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
  7842. std::swap( m_allocator, rhs.m_allocator );
  7843. std::swap( m_dispatcher, rhs.m_dispatcher );
  7844. }
  7845. return *this;
  7846. }
  7847. VULKAN_HPP_NAMESPACE::PipelineLayout const & operator*() const VULKAN_HPP_NOEXCEPT
  7848. {
  7849. return m_pipelineLayout;
  7850. }
  7851. void clear() VULKAN_HPP_NOEXCEPT
  7852. {
  7853. if ( m_pipelineLayout )
  7854. {
  7855. getDispatcher()->vkDestroyPipelineLayout( static_cast<VkDevice>( m_device ),
  7856. static_cast<VkPipelineLayout>( m_pipelineLayout ),
  7857. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7858. }
  7859. m_device = nullptr;
  7860. m_pipelineLayout = nullptr;
  7861. m_allocator = nullptr;
  7862. m_dispatcher = nullptr;
  7863. }
  7864. VULKAN_HPP_NAMESPACE::PipelineLayout release()
  7865. {
  7866. m_device = nullptr;
  7867. m_allocator = nullptr;
  7868. m_dispatcher = nullptr;
  7869. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_pipelineLayout, nullptr );
  7870. }
  7871. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7872. {
  7873. return m_device;
  7874. }
  7875. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7876. {
  7877. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7878. return m_dispatcher;
  7879. }
  7880. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineLayout & rhs ) VULKAN_HPP_NOEXCEPT
  7881. {
  7882. std::swap( m_device, rhs.m_device );
  7883. std::swap( m_pipelineLayout, rhs.m_pipelineLayout );
  7884. std::swap( m_allocator, rhs.m_allocator );
  7885. std::swap( m_dispatcher, rhs.m_dispatcher );
  7886. }
  7887. private:
  7888. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7889. VULKAN_HPP_NAMESPACE::PipelineLayout m_pipelineLayout = {};
  7890. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7891. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7892. };
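// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device and `setLayout` a hypothetical vk::DescriptorSetLayout handle.
//
//   vk::PipelineLayoutCreateInfo layoutInfo{};
//   layoutInfo.setLayoutCount = 1;
//   layoutInfo.pSetLayouts    = &setLayout;
//   vk::raii::PipelineLayout pipelineLayout( device, layoutInfo );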
  7893. class PrivateDataSlot
  7894. {
  7895. public:
  7896. using CType = VkPrivateDataSlot;
  7897. using CppType = vk::PrivateDataSlot;
  7898. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlot;
  7899. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  7900. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  7901. public:
  7902. PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7903. VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
  7904. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7905. : m_device( *device )
  7906. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7907. , m_dispatcher( device.getDispatcher() )
  7908. {
  7909. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  7910. device.getDispatcher()->vkCreatePrivateDataSlot( static_cast<VkDevice>( *device ),
  7911. reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
  7912. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  7913. reinterpret_cast<VkPrivateDataSlot *>( &m_privateDataSlot ) ) );
  7914. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  7915. {
  7916. detail::throwResultException( result, "vkCreatePrivateDataSlot" );
  7917. }
  7918. }
  7919. PrivateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  7920. VkPrivateDataSlot privateDataSlot,
  7921. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  7922. : m_device( *device )
  7923. , m_privateDataSlot( privateDataSlot )
  7924. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  7925. , m_dispatcher( device.getDispatcher() )
  7926. {
  7927. }
  7928. PrivateDataSlot( std::nullptr_t ) {}
  7929. ~PrivateDataSlot()
  7930. {
  7931. clear();
  7932. }
  7933. PrivateDataSlot() = delete;
  7934. PrivateDataSlot( PrivateDataSlot const & ) = delete;
  7935. PrivateDataSlot( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
  7936. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  7937. , m_privateDataSlot( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_privateDataSlot, {} ) )
  7938. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  7939. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  7940. {
  7941. }
  7942. PrivateDataSlot & operator=( PrivateDataSlot const & ) = delete;
  7943. PrivateDataSlot & operator =( PrivateDataSlot && rhs ) VULKAN_HPP_NOEXCEPT
  7944. {
  7945. if ( this != &rhs )
  7946. {
  7947. std::swap( m_device, rhs.m_device );
  7948. std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
  7949. std::swap( m_allocator, rhs.m_allocator );
  7950. std::swap( m_dispatcher, rhs.m_dispatcher );
  7951. }
  7952. return *this;
  7953. }
  7954. VULKAN_HPP_NAMESPACE::PrivateDataSlot const & operator*() const VULKAN_HPP_NOEXCEPT
  7955. {
  7956. return m_privateDataSlot;
  7957. }
  7958. void clear() VULKAN_HPP_NOEXCEPT
  7959. {
  7960. if ( m_privateDataSlot )
  7961. {
  7962. getDispatcher()->vkDestroyPrivateDataSlot( static_cast<VkDevice>( m_device ),
  7963. static_cast<VkPrivateDataSlot>( m_privateDataSlot ),
  7964. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  7965. }
  7966. m_device = nullptr;
  7967. m_privateDataSlot = nullptr;
  7968. m_allocator = nullptr;
  7969. m_dispatcher = nullptr;
  7970. }
  7971. VULKAN_HPP_NAMESPACE::PrivateDataSlot release()
  7972. {
  7973. m_device = nullptr;
  7974. m_allocator = nullptr;
  7975. m_dispatcher = nullptr;
  7976. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_privateDataSlot, nullptr );
  7977. }
  7978. VULKAN_HPP_NAMESPACE::Device getDevice() const
  7979. {
  7980. return m_device;
  7981. }
  7982. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  7983. {
  7984. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  7985. return m_dispatcher;
  7986. }
  7987. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot & rhs ) VULKAN_HPP_NOEXCEPT
  7988. {
  7989. std::swap( m_device, rhs.m_device );
  7990. std::swap( m_privateDataSlot, rhs.m_privateDataSlot );
  7991. std::swap( m_allocator, rhs.m_allocator );
  7992. std::swap( m_dispatcher, rhs.m_dispatcher );
  7993. }
  7994. private:
  7995. VULKAN_HPP_NAMESPACE::Device m_device = {};
  7996. VULKAN_HPP_NAMESPACE::PrivateDataSlot m_privateDataSlot = {};
  7997. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  7998. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  7999. };
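// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device created for Vulkan 1.3 or with VK_EXT_private_data enabled.
//
//   vk::raii::PrivateDataSlot slot( device, vk::PrivateDataSlotCreateInfo{} );
//   // the underlying handle, *slot, is then passed to the device-level private-data calls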
  8000. class QueryPool
  8001. {
  8002. public:
  8003. using CType = VkQueryPool;
  8004. using CppType = vk::QueryPool;
  8005. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
  8006. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8007. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
  8008. public:
  8009. QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8010. VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
  8011. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8012. : m_device( *device )
  8013. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8014. , m_dispatcher( device.getDispatcher() )
  8015. {
  8016. VULKAN_HPP_NAMESPACE::Result result =
  8017. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateQueryPool( static_cast<VkDevice>( *device ),
  8018. reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
  8019. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8020. reinterpret_cast<VkQueryPool *>( &m_queryPool ) ) );
  8021. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8022. {
  8023. detail::throwResultException( result, "vkCreateQueryPool" );
  8024. }
  8025. }
  8026. QueryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8027. VkQueryPool queryPool,
  8028. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8029. : m_device( *device )
  8030. , m_queryPool( queryPool )
  8031. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8032. , m_dispatcher( device.getDispatcher() )
  8033. {
  8034. }
  8035. QueryPool( std::nullptr_t ) {}
  8036. ~QueryPool()
  8037. {
  8038. clear();
  8039. }
  8040. QueryPool() = delete;
  8041. QueryPool( QueryPool const & ) = delete;
  8042. QueryPool( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
  8043. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8044. , m_queryPool( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queryPool, {} ) )
  8045. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8046. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8047. {
  8048. }
  8049. QueryPool & operator=( QueryPool const & ) = delete;
  8050. QueryPool & operator =( QueryPool && rhs ) VULKAN_HPP_NOEXCEPT
  8051. {
  8052. if ( this != &rhs )
  8053. {
  8054. std::swap( m_device, rhs.m_device );
  8055. std::swap( m_queryPool, rhs.m_queryPool );
  8056. std::swap( m_allocator, rhs.m_allocator );
  8057. std::swap( m_dispatcher, rhs.m_dispatcher );
  8058. }
  8059. return *this;
  8060. }
  8061. VULKAN_HPP_NAMESPACE::QueryPool const & operator*() const VULKAN_HPP_NOEXCEPT
  8062. {
  8063. return m_queryPool;
  8064. }
  8065. void clear() VULKAN_HPP_NOEXCEPT
  8066. {
  8067. if ( m_queryPool )
  8068. {
  8069. getDispatcher()->vkDestroyQueryPool(
  8070. static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8071. }
  8072. m_device = nullptr;
  8073. m_queryPool = nullptr;
  8074. m_allocator = nullptr;
  8075. m_dispatcher = nullptr;
  8076. }
  8077. VULKAN_HPP_NAMESPACE::QueryPool release()
  8078. {
  8079. m_device = nullptr;
  8080. m_allocator = nullptr;
  8081. m_dispatcher = nullptr;
  8082. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queryPool, nullptr );
  8083. }
  8084. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8085. {
  8086. return m_device;
  8087. }
  8088. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8089. {
  8090. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8091. return m_dispatcher;
  8092. }
  8093. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::QueryPool & rhs ) VULKAN_HPP_NOEXCEPT
  8094. {
  8095. std::swap( m_device, rhs.m_device );
  8096. std::swap( m_queryPool, rhs.m_queryPool );
  8097. std::swap( m_allocator, rhs.m_allocator );
  8098. std::swap( m_dispatcher, rhs.m_dispatcher );
  8099. }
  8100. //=== VK_VERSION_1_0 ===
  8101. template <typename DataType>
  8102. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>>
  8103. getResults( uint32_t firstQuery,
  8104. uint32_t queryCount,
  8105. size_t dataSize,
  8106. VULKAN_HPP_NAMESPACE::DeviceSize stride,
  8107. VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8108. template <typename DataType>
  8109. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, DataType>
  8110. getResult( uint32_t firstQuery,
  8111. uint32_t queryCount,
  8112. VULKAN_HPP_NAMESPACE::DeviceSize stride,
  8113. VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8114. //=== VK_VERSION_1_2 ===
  8115. void reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
  8116. //=== VK_EXT_host_query_reset ===
  8117. void resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT;
  8118. private:
  8119. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8120. VULKAN_HPP_NAMESPACE::QueryPool m_queryPool = {};
  8121. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8122. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8123. };
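// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device, and the pool is assumed to have been written by previously submitted commands.
//
//   vk::QueryPoolCreateInfo poolInfo{};
//   poolInfo.queryType  = vk::QueryType::eTimestamp;
//   poolInfo.queryCount = 2;
//   vk::raii::QueryPool pool( device, poolInfo );
//   auto [result, timestamps] = pool.getResults<uint64_t>(
//     0, 2, 2 * sizeof( uint64_t ), sizeof( uint64_t ), vk::QueryResultFlagBits::e64 );
//   // result can be vk::Result::eNotReady unless vk::QueryResultFlagBits::eWait is also passed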
  8124. class Queue
  8125. {
  8126. public:
  8127. using CType = VkQueue;
  8128. using CppType = vk::Queue;
  8129. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
  8130. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8131. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
  8132. public:
  8133. Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, uint32_t queueFamilyIndex, uint32_t queueIndex )
  8134. : m_dispatcher( device.getDispatcher() )
  8135. {
  8136. getDispatcher()->vkGetDeviceQueue( static_cast<VkDevice>( *device ), queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &m_queue ) );
  8137. }
  8138. Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo )
  8139. : m_dispatcher( device.getDispatcher() )
  8140. {
  8141. getDispatcher()->vkGetDeviceQueue2(
  8142. static_cast<VkDevice>( *device ), reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &m_queue ) );
  8143. }
  8144. Queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device, VkQueue queue ) : m_queue( queue ), m_dispatcher( device.getDispatcher() )
  8145. {
  8146. }
  8147. Queue( std::nullptr_t ) {}
  8148. ~Queue()
  8149. {
  8150. clear();
  8151. }
  8152. Queue() = delete;
  8153. Queue( Queue const & rhs ) : m_queue( rhs.m_queue ), m_dispatcher( rhs.m_dispatcher ) {}
  8154. Queue( Queue && rhs ) VULKAN_HPP_NOEXCEPT
  8155. : m_queue( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_queue, {} ) )
  8156. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8157. {
  8158. }
  8159. Queue & operator=( Queue const & rhs )
  8160. {
  8161. m_queue = rhs.m_queue;
  8162. m_dispatcher = rhs.m_dispatcher;
  8163. return *this;
  8164. }
  8165. Queue & operator=( Queue && rhs ) VULKAN_HPP_NOEXCEPT
  8166. {
  8167. if ( this != &rhs )
  8168. {
  8169. std::swap( m_queue, rhs.m_queue );
  8170. std::swap( m_dispatcher, rhs.m_dispatcher );
  8171. }
  8172. return *this;
  8173. }
  8174. VULKAN_HPP_NAMESPACE::Queue const & operator*() const VULKAN_HPP_NOEXCEPT
  8175. {
  8176. return m_queue;
  8177. }
  8178. void clear() VULKAN_HPP_NOEXCEPT
  8179. {
  8180. m_queue = nullptr;
  8181. m_dispatcher = nullptr;
  8182. }
  8183. VULKAN_HPP_NAMESPACE::Queue release()
  8184. {
  8185. m_dispatcher = nullptr;
  8186. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_queue, nullptr );
  8187. }
  8188. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8189. {
  8190. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8191. return m_dispatcher;
  8192. }
  8193. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Queue & rhs ) VULKAN_HPP_NOEXCEPT
  8194. {
  8195. std::swap( m_queue, rhs.m_queue );
  8196. std::swap( m_dispatcher, rhs.m_dispatcher );
  8197. }
  8198. //=== VK_VERSION_1_0 ===
  8199. void submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
  8200. VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8201. void waitIdle() const;
  8202. void bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
  8203. VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8204. //=== VK_VERSION_1_3 ===
  8205. void submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
  8206. VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8207. //=== VK_KHR_swapchain ===
  8208. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const;
  8209. //=== VK_EXT_debug_utils ===
  8210. void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
  8211. void endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT;
  8212. void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT;
  8213. //=== VK_NV_device_diagnostic_checkpoints ===
  8214. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> getCheckpointDataNV() const;
  8215. //=== VK_INTEL_performance_query ===
  8216. void setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const;
  8217. //=== VK_KHR_synchronization2 ===
  8218. void submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
  8219. VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  8220. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> getCheckpointData2NV() const;
  8221. //=== VK_NV_low_latency2 ===
  8222. void notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT;
  8223. private:
  8224. VULKAN_HPP_NAMESPACE::Queue m_queue = {};
  8225. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8226. };
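// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device, `graphicsFamilyIndex` a suitable queue family index, and `commandBuffer` a
// recorded vk::CommandBuffer handle; all three are assumptions of this sketch.
//
//   vk::raii::Queue queue( device, graphicsFamilyIndex, 0 );
//   vk::SubmitInfo submitInfo{};
//   submitInfo.commandBufferCount = 1;
//   submitInfo.pCommandBuffers    = &commandBuffer;
//   queue.submit( submitInfo );  // the fence parameter defaults to a null handle
//   queue.waitIdle();
//
// Note that Queue is copyable (queues are retrieved, not owned, so clear() destroys nothing), and
// presentKHR() returns its vk::Result so success codes other than eSuccess can be inspected.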
  8227. class RenderPass
  8228. {
  8229. public:
  8230. using CType = VkRenderPass;
  8231. using CppType = vk::RenderPass;
  8232. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
  8233. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8234. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
  8235. public:
  8236. RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8237. VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
  8238. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8239. : m_device( *device )
  8240. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8241. , m_dispatcher( device.getDispatcher() )
  8242. {
  8243. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8244. device.getDispatcher()->vkCreateRenderPass( static_cast<VkDevice>( *device ),
  8245. reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
  8246. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8247. reinterpret_cast<VkRenderPass *>( &m_renderPass ) ) );
  8248. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8249. {
  8250. detail::throwResultException( result, "vkCreateRenderPass" );
  8251. }
  8252. }
  8253. RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8254. VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
  8255. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8256. : m_device( *device )
  8257. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8258. , m_dispatcher( device.getDispatcher() )
  8259. {
  8260. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8261. device.getDispatcher()->vkCreateRenderPass2( static_cast<VkDevice>( *device ),
  8262. reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
  8263. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8264. reinterpret_cast<VkRenderPass *>( &m_renderPass ) ) );
  8265. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8266. {
  8267. detail::throwResultException( result, "vkCreateRenderPass2" );
  8268. }
  8269. }
  8270. RenderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8271. VkRenderPass renderPass,
  8272. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8273. : m_device( *device )
  8274. , m_renderPass( renderPass )
  8275. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8276. , m_dispatcher( device.getDispatcher() )
  8277. {
  8278. }
  8279. RenderPass( std::nullptr_t ) {}
  8280. ~RenderPass()
  8281. {
  8282. clear();
  8283. }
  8284. RenderPass() = delete;
  8285. RenderPass( RenderPass const & ) = delete;
  8286. RenderPass( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
  8287. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8288. , m_renderPass( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_renderPass, {} ) )
  8289. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8290. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8291. {
  8292. }
  8293. RenderPass & operator=( RenderPass const & ) = delete;
  8294. RenderPass & operator =( RenderPass && rhs ) VULKAN_HPP_NOEXCEPT
  8295. {
  8296. if ( this != &rhs )
  8297. {
  8298. std::swap( m_device, rhs.m_device );
  8299. std::swap( m_renderPass, rhs.m_renderPass );
  8300. std::swap( m_allocator, rhs.m_allocator );
  8301. std::swap( m_dispatcher, rhs.m_dispatcher );
  8302. }
  8303. return *this;
  8304. }
  8305. VULKAN_HPP_NAMESPACE::RenderPass const & operator*() const VULKAN_HPP_NOEXCEPT
  8306. {
  8307. return m_renderPass;
  8308. }
  8309. void clear() VULKAN_HPP_NOEXCEPT
  8310. {
  8311. if ( m_renderPass )
  8312. {
  8313. getDispatcher()->vkDestroyRenderPass(
  8314. static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8315. }
  8316. m_device = nullptr;
  8317. m_renderPass = nullptr;
  8318. m_allocator = nullptr;
  8319. m_dispatcher = nullptr;
  8320. }
  8321. VULKAN_HPP_NAMESPACE::RenderPass release()
  8322. {
  8323. m_device = nullptr;
  8324. m_allocator = nullptr;
  8325. m_dispatcher = nullptr;
  8326. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_renderPass, nullptr );
  8327. }
  8328. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8329. {
  8330. return m_device;
  8331. }
  8332. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8333. {
  8334. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8335. return m_dispatcher;
  8336. }
  8337. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::RenderPass & rhs ) VULKAN_HPP_NOEXCEPT
  8338. {
  8339. std::swap( m_device, rhs.m_device );
  8340. std::swap( m_renderPass, rhs.m_renderPass );
  8341. std::swap( m_allocator, rhs.m_allocator );
  8342. std::swap( m_dispatcher, rhs.m_dispatcher );
  8343. }
  8344. //=== VK_VERSION_1_0 ===
  8345. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT;
  8346. //=== VK_HUAWEI_subpass_shading ===
  8347. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D> getSubpassShadingMaxWorkgroupSizeHUAWEI() const;
  8348. private:
  8349. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8350. VULKAN_HPP_NAMESPACE::RenderPass m_renderPass = {};
  8351. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8352. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8353. };
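// Usage sketch (editorial comment, illustrative only - not generated code): `device` is an existing
// vk::raii::Device; attachment, subpass, and dependency descriptions are elided.
//
//   vk::RenderPassCreateInfo2 createInfo{};  // fill attachments / subpasses / dependencies
//   vk::raii::RenderPass renderPass( device, createInfo );
//   vk::Extent2D granularity = renderPass.getRenderAreaGranularity();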
  8354. class Sampler
  8355. {
  8356. public:
  8357. using CType = VkSampler;
  8358. using CppType = vk::Sampler;
  8359. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
  8360. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8361. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
  8362. public:
  8363. Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8364. VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
  8365. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8366. : m_device( *device )
  8367. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8368. , m_dispatcher( device.getDispatcher() )
  8369. {
  8370. VULKAN_HPP_NAMESPACE::Result result =
  8371. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSampler( static_cast<VkDevice>( *device ),
  8372. reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
  8373. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8374. reinterpret_cast<VkSampler *>( &m_sampler ) ) );
  8375. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8376. {
  8377. detail::throwResultException( result, "vkCreateSampler" );
  8378. }
  8379. }
  8380. Sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8381. VkSampler sampler,
  8382. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8383. : m_device( *device )
  8384. , m_sampler( sampler )
  8385. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8386. , m_dispatcher( device.getDispatcher() )
  8387. {
  8388. }
  8389. Sampler( std::nullptr_t ) {}
  8390. ~Sampler()
  8391. {
  8392. clear();
  8393. }
  8394. Sampler() = delete;
  8395. Sampler( Sampler const & ) = delete;
  8396. Sampler( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
  8397. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8398. , m_sampler( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_sampler, {} ) )
  8399. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8400. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8401. {
  8402. }
  8403. Sampler & operator=( Sampler const & ) = delete;
  8404. Sampler & operator =( Sampler && rhs ) VULKAN_HPP_NOEXCEPT
  8405. {
  8406. if ( this != &rhs )
  8407. {
  8408. std::swap( m_device, rhs.m_device );
  8409. std::swap( m_sampler, rhs.m_sampler );
  8410. std::swap( m_allocator, rhs.m_allocator );
  8411. std::swap( m_dispatcher, rhs.m_dispatcher );
  8412. }
  8413. return *this;
  8414. }
  8415. VULKAN_HPP_NAMESPACE::Sampler const & operator*() const VULKAN_HPP_NOEXCEPT
  8416. {
  8417. return m_sampler;
  8418. }
  8419. void clear() VULKAN_HPP_NOEXCEPT
  8420. {
  8421. if ( m_sampler )
  8422. {
  8423. getDispatcher()->vkDestroySampler(
  8424. static_cast<VkDevice>( m_device ), static_cast<VkSampler>( m_sampler ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8425. }
  8426. m_device = nullptr;
  8427. m_sampler = nullptr;
  8428. m_allocator = nullptr;
  8429. m_dispatcher = nullptr;
  8430. }
  8431. VULKAN_HPP_NAMESPACE::Sampler release()
  8432. {
  8433. m_device = nullptr;
  8434. m_allocator = nullptr;
  8435. m_dispatcher = nullptr;
  8436. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_sampler, nullptr );
  8437. }
  8438. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8439. {
  8440. return m_device;
  8441. }
  8442. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8443. {
  8444. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8445. return m_dispatcher;
  8446. }
  8447. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Sampler & rhs ) VULKAN_HPP_NOEXCEPT
  8448. {
  8449. std::swap( m_device, rhs.m_device );
  8450. std::swap( m_sampler, rhs.m_sampler );
  8451. std::swap( m_allocator, rhs.m_allocator );
  8452. std::swap( m_dispatcher, rhs.m_dispatcher );
  8453. }
  8454. private:
  8455. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8456. VULKAN_HPP_NAMESPACE::Sampler m_sampler = {};
  8457. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8458. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8459. };
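// Illustrative usage sketch (not part of the generated header): the Sampler wrapper follows the same
// pattern as the other RAII handles above. Assumes a valid vk::raii::Device `device` and a filled-in
// vk::SamplerCreateInfo `samplerCreateInfo` (placeholder names):
//
//   vk::raii::Sampler sampler( device, samplerCreateInfo );   // throws on a non-success VkResult
//   vk::Sampler handle = *sampler;                            // operator* exposes the plain handle
//   // The destructor (via clear()) calls vkDestroySampler with the stored allocation callbacks.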
  8460. class SamplerYcbcrConversion
  8461. {
  8462. public:
  8463. using CType = VkSamplerYcbcrConversion;
  8464. using CppType = vk::SamplerYcbcrConversion;
  8465. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
  8466. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8467. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
  8468. public:
  8469. SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8470. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
  8471. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8472. : m_device( *device )
  8473. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8474. , m_dispatcher( device.getDispatcher() )
  8475. {
  8476. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8477. device.getDispatcher()->vkCreateSamplerYcbcrConversion( static_cast<VkDevice>( *device ),
  8478. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  8479. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8480. reinterpret_cast<VkSamplerYcbcrConversion *>( &m_ycbcrConversion ) ) );
  8481. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8482. {
  8483. detail::throwResultException( result, "vkCreateSamplerYcbcrConversion" );
  8484. }
  8485. }
  8486. SamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8487. VkSamplerYcbcrConversion ycbcrConversion,
  8488. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8489. : m_device( *device )
  8490. , m_ycbcrConversion( ycbcrConversion )
  8491. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8492. , m_dispatcher( device.getDispatcher() )
  8493. {
  8494. }
  8495. SamplerYcbcrConversion( std::nullptr_t ) {}
  8496. ~SamplerYcbcrConversion()
  8497. {
  8498. clear();
  8499. }
  8500. SamplerYcbcrConversion() = delete;
  8501. SamplerYcbcrConversion( SamplerYcbcrConversion const & ) = delete;
  8502. SamplerYcbcrConversion( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
  8503. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8504. , m_ycbcrConversion( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_ycbcrConversion, {} ) )
  8505. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8506. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8507. {
  8508. }
  8509. SamplerYcbcrConversion & operator=( SamplerYcbcrConversion const & ) = delete;
  8510. SamplerYcbcrConversion & operator =( SamplerYcbcrConversion && rhs ) VULKAN_HPP_NOEXCEPT
  8511. {
  8512. if ( this != &rhs )
  8513. {
  8514. std::swap( m_device, rhs.m_device );
  8515. std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
  8516. std::swap( m_allocator, rhs.m_allocator );
  8517. std::swap( m_dispatcher, rhs.m_dispatcher );
  8518. }
  8519. return *this;
  8520. }
  8521. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const & operator*() const VULKAN_HPP_NOEXCEPT
  8522. {
  8523. return m_ycbcrConversion;
  8524. }
  8525. void clear() VULKAN_HPP_NOEXCEPT
  8526. {
  8527. if ( m_ycbcrConversion )
  8528. {
  8529. getDispatcher()->vkDestroySamplerYcbcrConversion( static_cast<VkDevice>( m_device ),
  8530. static_cast<VkSamplerYcbcrConversion>( m_ycbcrConversion ),
  8531. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8532. }
  8533. m_device = nullptr;
  8534. m_ycbcrConversion = nullptr;
  8535. m_allocator = nullptr;
  8536. m_dispatcher = nullptr;
  8537. }
  8538. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion release()
  8539. {
  8540. m_device = nullptr;
  8541. m_allocator = nullptr;
  8542. m_dispatcher = nullptr;
  8543. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_ycbcrConversion, nullptr );
  8544. }
  8545. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8546. {
  8547. return m_device;
  8548. }
  8549. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8550. {
  8551. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8552. return m_dispatcher;
  8553. }
  8554. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion & rhs ) VULKAN_HPP_NOEXCEPT
  8555. {
  8556. std::swap( m_device, rhs.m_device );
  8557. std::swap( m_ycbcrConversion, rhs.m_ycbcrConversion );
  8558. std::swap( m_allocator, rhs.m_allocator );
  8559. std::swap( m_dispatcher, rhs.m_dispatcher );
  8560. }
  8561. private:
  8562. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8563. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion m_ycbcrConversion = {};
  8564. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8565. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8566. };
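// Illustrative usage sketch (not part of the generated header): a Y'CbCr conversion is typically chained
// into a sampler through vk::SamplerYcbcrConversionInfo. Placeholder names throughout; assumes
// `conversionCreateInfo` and `samplerCreateInfo` have been filled in elsewhere.
//
//   vk::raii::SamplerYcbcrConversion conversion( device, conversionCreateInfo );
//   vk::SamplerYcbcrConversionInfo conversionInfo( *conversion );
//   samplerCreateInfo.pNext = &conversionInfo;
//   vk::raii::Sampler ycbcrSampler( device, samplerCreateInfo );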
  8567. class Semaphore
  8568. {
  8569. public:
  8570. using CType = VkSemaphore;
  8571. using CppType = vk::Semaphore;
  8572. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
  8573. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8574. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
  8575. public:
  8576. Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8577. VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
  8578. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8579. : m_device( *device )
  8580. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8581. , m_dispatcher( device.getDispatcher() )
  8582. {
  8583. VULKAN_HPP_NAMESPACE::Result result =
  8584. static_cast<VULKAN_HPP_NAMESPACE::Result>( device.getDispatcher()->vkCreateSemaphore( static_cast<VkDevice>( *device ),
  8585. reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
  8586. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8587. reinterpret_cast<VkSemaphore *>( &m_semaphore ) ) );
  8588. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8589. {
  8590. detail::throwResultException( result, "vkCreateSemaphore" );
  8591. }
  8592. }
  8593. Semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8594. VkSemaphore semaphore,
  8595. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8596. : m_device( *device )
  8597. , m_semaphore( semaphore )
  8598. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8599. , m_dispatcher( device.getDispatcher() )
  8600. {
  8601. }
  8602. Semaphore( std::nullptr_t ) {}
  8603. ~Semaphore()
  8604. {
  8605. clear();
  8606. }
  8607. Semaphore() = delete;
  8608. Semaphore( Semaphore const & ) = delete;
  8609. Semaphore( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
  8610. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8611. , m_semaphore( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_semaphore, {} ) )
  8612. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8613. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8614. {
  8615. }
  8616. Semaphore & operator=( Semaphore const & ) = delete;
  8617. Semaphore & operator =( Semaphore && rhs ) VULKAN_HPP_NOEXCEPT
  8618. {
  8619. if ( this != &rhs )
  8620. {
  8621. std::swap( m_device, rhs.m_device );
  8622. std::swap( m_semaphore, rhs.m_semaphore );
  8623. std::swap( m_allocator, rhs.m_allocator );
  8624. std::swap( m_dispatcher, rhs.m_dispatcher );
  8625. }
  8626. return *this;
  8627. }
  8628. VULKAN_HPP_NAMESPACE::Semaphore const & operator*() const VULKAN_HPP_NOEXCEPT
  8629. {
  8630. return m_semaphore;
  8631. }
  8632. void clear() VULKAN_HPP_NOEXCEPT
  8633. {
  8634. if ( m_semaphore )
  8635. {
  8636. getDispatcher()->vkDestroySemaphore(
  8637. static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8638. }
  8639. m_device = nullptr;
  8640. m_semaphore = nullptr;
  8641. m_allocator = nullptr;
  8642. m_dispatcher = nullptr;
  8643. }
  8644. VULKAN_HPP_NAMESPACE::Semaphore release()
  8645. {
  8646. m_device = nullptr;
  8647. m_allocator = nullptr;
  8648. m_dispatcher = nullptr;
  8649. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_semaphore, nullptr );
  8650. }
  8651. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8652. {
  8653. return m_device;
  8654. }
  8655. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8656. {
  8657. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8658. return m_dispatcher;
  8659. }
  8660. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Semaphore & rhs ) VULKAN_HPP_NOEXCEPT
  8661. {
  8662. std::swap( m_device, rhs.m_device );
  8663. std::swap( m_semaphore, rhs.m_semaphore );
  8664. std::swap( m_allocator, rhs.m_allocator );
  8665. std::swap( m_dispatcher, rhs.m_dispatcher );
  8666. }
  8667. //=== VK_VERSION_1_2 ===
  8668. VULKAN_HPP_NODISCARD uint64_t getCounterValue() const;
  8669. //=== VK_KHR_timeline_semaphore ===
  8670. VULKAN_HPP_NODISCARD uint64_t getCounterValueKHR() const;
  8671. private:
  8672. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8673. VULKAN_HPP_NAMESPACE::Semaphore m_semaphore = {};
  8674. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8675. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8676. };
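// Illustrative usage sketch (not part of the generated header): getCounterValue() / getCounterValueKHR()
// are only meaningful for timeline semaphores. Placeholder names; assumes Vulkan 1.2 or
// VK_KHR_timeline_semaphore is available on `device`.
//
//   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, 0 );
//   vk::SemaphoreCreateInfo semaphoreCreateInfo;
//   semaphoreCreateInfo.pNext = &typeInfo;
//   vk::raii::Semaphore timeline( device, semaphoreCreateInfo );
//   uint64_t current = timeline.getCounterValue();   // wraps vkGetSemaphoreCounterValue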
  8677. class ShaderEXT
  8678. {
  8679. public:
  8680. using CType = VkShaderEXT;
  8681. using CppType = vk::ShaderEXT;
  8682. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderEXT;
  8683. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8684. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  8685. public:
  8686. ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8687. VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
  8688. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8689. : m_device( *device )
  8690. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8691. , m_dispatcher( device.getDispatcher() )
  8692. {
  8693. VULKAN_HPP_NAMESPACE::Result result =
  8694. static_cast<VULKAN_HPP_NAMESPACE::Result>( getDispatcher()->vkCreateShadersEXT( static_cast<VkDevice>( *device ),
  8695. 1,
  8696. reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
  8697. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8698. reinterpret_cast<VkShaderEXT *>( &m_shader ) ) );
  8699. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8700. {
  8701. detail::throwResultException( result, "vkCreateShadersEXT" );
  8702. }
  8703. }
  8704. ShaderEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8705. VkShaderEXT shader,
  8706. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8707. : m_device( *device )
  8708. , m_shader( shader )
  8709. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8710. , m_dispatcher( device.getDispatcher() )
  8711. {
  8712. }
  8713. ShaderEXT( std::nullptr_t ) {}
  8714. ~ShaderEXT()
  8715. {
  8716. clear();
  8717. }
  8718. ShaderEXT() = delete;
  8719. ShaderEXT( ShaderEXT const & ) = delete;
  8720. ShaderEXT( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
  8721. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8722. , m_shader( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shader, {} ) )
  8723. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8724. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8725. {
  8726. }
  8727. ShaderEXT & operator=( ShaderEXT const & ) = delete;
  8728. ShaderEXT & operator =( ShaderEXT && rhs ) VULKAN_HPP_NOEXCEPT
  8729. {
  8730. if ( this != &rhs )
  8731. {
  8732. std::swap( m_device, rhs.m_device );
  8733. std::swap( m_shader, rhs.m_shader );
  8734. std::swap( m_allocator, rhs.m_allocator );
  8735. std::swap( m_dispatcher, rhs.m_dispatcher );
  8736. }
  8737. return *this;
  8738. }
  8739. VULKAN_HPP_NAMESPACE::ShaderEXT const & operator*() const VULKAN_HPP_NOEXCEPT
  8740. {
  8741. return m_shader;
  8742. }
  8743. void clear() VULKAN_HPP_NOEXCEPT
  8744. {
  8745. if ( m_shader )
  8746. {
  8747. getDispatcher()->vkDestroyShaderEXT(
  8748. static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8749. }
  8750. m_device = nullptr;
  8751. m_shader = nullptr;
  8752. m_allocator = nullptr;
  8753. m_dispatcher = nullptr;
  8754. }
  8755. VULKAN_HPP_NAMESPACE::ShaderEXT release()
  8756. {
  8757. m_device = nullptr;
  8758. m_allocator = nullptr;
  8759. m_dispatcher = nullptr;
  8760. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shader, nullptr );
  8761. }
  8762. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8763. {
  8764. return m_device;
  8765. }
  8766. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8767. {
  8768. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8769. return m_dispatcher;
  8770. }
  8771. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT & rhs ) VULKAN_HPP_NOEXCEPT
  8772. {
  8773. std::swap( m_device, rhs.m_device );
  8774. std::swap( m_shader, rhs.m_shader );
  8775. std::swap( m_allocator, rhs.m_allocator );
  8776. std::swap( m_dispatcher, rhs.m_dispatcher );
  8777. }
  8778. //=== VK_EXT_shader_object ===
  8779. VULKAN_HPP_NODISCARD std::vector<uint8_t> getBinaryData() const;
  8780. private:
  8781. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8782. VULKAN_HPP_NAMESPACE::ShaderEXT m_shader = {};
  8783. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8784. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8785. };
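// Illustrative usage sketch (not part of the generated header): a single shader object created via
// VK_EXT_shader_object; getBinaryData() can be used to cache the compiled shader. Placeholder names;
// assumes the extension is enabled and `shaderCreateInfo` is a filled-in vk::ShaderCreateInfoEXT.
//
//   vk::raii::ShaderEXT shader( device, shaderCreateInfo );   // calls vkCreateShadersEXT with a count of 1
//   std::vector<uint8_t> binary = shader.getBinaryData();     // wraps vkGetShaderBinaryDataEXT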
  8786. class ShaderEXTs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
  8787. {
  8788. public:
  8789. ShaderEXTs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8790. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
  8791. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8792. {
  8793. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  8794. std::vector<VkShaderEXT> shaders( createInfos.size() );
  8795. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateShadersEXT(
  8796. static_cast<VkDevice>( *device ),
  8797. createInfos.size(),
  8798. reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
  8799. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  8800. shaders.data() ) );
  8801. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8802. {
  8803. this->reserve( createInfos.size() );
  8804. for ( auto const & shaderEXT : shaders )
  8805. {
  8806. this->emplace_back( device, shaderEXT, allocator );
  8807. }
  8808. }
  8809. else
  8810. {
  8811. detail::throwResultException( result, "vkCreateShadersEXT" );
  8812. }
  8813. }
  8814. ShaderEXTs( std::nullptr_t ) {}
  8815. ShaderEXTs() = delete;
  8816. ShaderEXTs( ShaderEXTs const & ) = delete;
  8817. ShaderEXTs( ShaderEXTs && rhs ) = default;
  8818. ShaderEXTs & operator=( ShaderEXTs const & ) = delete;
  8819. ShaderEXTs & operator=( ShaderEXTs && rhs ) = default;
  8820. };
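// Illustrative usage sketch (not part of the generated header): ShaderEXTs creates several shader objects
// in one vkCreateShadersEXT call, e.g. for linked vertex/fragment stages. Placeholder names; assumes
// `vertexInfo` and `fragmentInfo` are vk::ShaderCreateInfoEXT values prepared for linking.
//
//   std::array<vk::ShaderCreateInfoEXT, 2> infos = { vertexInfo, fragmentInfo };
//   vk::raii::ShaderEXTs shaders( device, infos );   // shaders[0], shaders[1] are vk::raii::ShaderEXT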
  8821. class ShaderModule
  8822. {
  8823. public:
  8824. using CType = VkShaderModule;
  8825. using CppType = vk::ShaderModule;
  8826. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
  8827. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8828. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
  8829. public:
  8830. ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8831. VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
  8832. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8833. : m_device( *device )
  8834. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8835. , m_dispatcher( device.getDispatcher() )
  8836. {
  8837. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8838. device.getDispatcher()->vkCreateShaderModule( static_cast<VkDevice>( *device ),
  8839. reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
  8840. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8841. reinterpret_cast<VkShaderModule *>( &m_shaderModule ) ) );
  8842. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8843. {
  8844. detail::throwResultException( result, "vkCreateShaderModule" );
  8845. }
  8846. }
  8847. ShaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  8848. VkShaderModule shaderModule,
  8849. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8850. : m_device( *device )
  8851. , m_shaderModule( shaderModule )
  8852. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8853. , m_dispatcher( device.getDispatcher() )
  8854. {
  8855. }
  8856. ShaderModule( std::nullptr_t ) {}
  8857. ~ShaderModule()
  8858. {
  8859. clear();
  8860. }
  8861. ShaderModule() = delete;
  8862. ShaderModule( ShaderModule const & ) = delete;
  8863. ShaderModule( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
  8864. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  8865. , m_shaderModule( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_shaderModule, {} ) )
  8866. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  8867. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  8868. {
  8869. }
  8870. ShaderModule & operator=( ShaderModule const & ) = delete;
  8871. ShaderModule & operator =( ShaderModule && rhs ) VULKAN_HPP_NOEXCEPT
  8872. {
  8873. if ( this != &rhs )
  8874. {
  8875. std::swap( m_device, rhs.m_device );
  8876. std::swap( m_shaderModule, rhs.m_shaderModule );
  8877. std::swap( m_allocator, rhs.m_allocator );
  8878. std::swap( m_dispatcher, rhs.m_dispatcher );
  8879. }
  8880. return *this;
  8881. }
  8882. VULKAN_HPP_NAMESPACE::ShaderModule const & operator*() const VULKAN_HPP_NOEXCEPT
  8883. {
  8884. return m_shaderModule;
  8885. }
  8886. void clear() VULKAN_HPP_NOEXCEPT
  8887. {
  8888. if ( m_shaderModule )
  8889. {
  8890. getDispatcher()->vkDestroyShaderModule(
  8891. static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  8892. }
  8893. m_device = nullptr;
  8894. m_shaderModule = nullptr;
  8895. m_allocator = nullptr;
  8896. m_dispatcher = nullptr;
  8897. }
  8898. VULKAN_HPP_NAMESPACE::ShaderModule release()
  8899. {
  8900. m_device = nullptr;
  8901. m_allocator = nullptr;
  8902. m_dispatcher = nullptr;
  8903. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_shaderModule, nullptr );
  8904. }
  8905. VULKAN_HPP_NAMESPACE::Device getDevice() const
  8906. {
  8907. return m_device;
  8908. }
  8909. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  8910. {
  8911. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  8912. return m_dispatcher;
  8913. }
  8914. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ShaderModule & rhs ) VULKAN_HPP_NOEXCEPT
  8915. {
  8916. std::swap( m_device, rhs.m_device );
  8917. std::swap( m_shaderModule, rhs.m_shaderModule );
  8918. std::swap( m_allocator, rhs.m_allocator );
  8919. std::swap( m_dispatcher, rhs.m_dispatcher );
  8920. }
  8921. //=== VK_EXT_shader_module_identifier ===
  8922. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT getIdentifierEXT() const VULKAN_HPP_NOEXCEPT;
  8923. private:
  8924. VULKAN_HPP_NAMESPACE::Device m_device = {};
  8925. VULKAN_HPP_NAMESPACE::ShaderModule m_shaderModule = {};
  8926. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  8927. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  8928. };
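// Illustrative usage sketch (not part of the generated header): creating a shader module from a SPIR-V
// blob. Placeholder names; `spirv` is assumed to be a std::vector<uint32_t> holding valid SPIR-V code.
//
//   vk::ShaderModuleCreateInfo moduleCreateInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
//   vk::raii::ShaderModule shaderModule( device, moduleCreateInfo );
//   auto identifier = shaderModule.getIdentifierEXT();   // VK_EXT_shader_module_identifier, if enabled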
  8929. class SurfaceKHR
  8930. {
  8931. public:
  8932. using CType = VkSurfaceKHR;
  8933. using CppType = vk::SurfaceKHR;
  8934. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
  8935. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  8936. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
  8937. public:
  8938. # if defined( VK_USE_PLATFORM_ANDROID_KHR )
  8939. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  8940. VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
  8941. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8942. : m_instance( *instance )
  8943. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8944. , m_dispatcher( instance.getDispatcher() )
  8945. {
  8946. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8947. instance.getDispatcher()->vkCreateAndroidSurfaceKHR( static_cast<VkInstance>( *instance ),
  8948. reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
  8949. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8950. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  8951. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8952. {
  8953. detail::throwResultException( result, "vkCreateAndroidSurfaceKHR" );
  8954. }
  8955. }
  8956. # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  8957. # if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  8958. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  8959. VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
  8960. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8961. : m_instance( *instance )
  8962. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8963. , m_dispatcher( instance.getDispatcher() )
  8964. {
  8965. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8966. instance.getDispatcher()->vkCreateDirectFBSurfaceEXT( static_cast<VkInstance>( *instance ),
  8967. reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
  8968. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8969. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  8970. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8971. {
  8972. detail::throwResultException( result, "vkCreateDirectFBSurfaceEXT" );
  8973. }
  8974. }
  8975. # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  8976. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  8977. VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
  8978. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8979. : m_instance( *instance )
  8980. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8981. , m_dispatcher( instance.getDispatcher() )
  8982. {
  8983. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  8984. instance.getDispatcher()->vkCreateDisplayPlaneSurfaceKHR( static_cast<VkInstance>( *instance ),
  8985. reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
  8986. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  8987. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  8988. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  8989. {
  8990. detail::throwResultException( result, "vkCreateDisplayPlaneSurfaceKHR" );
  8991. }
  8992. }
  8993. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  8994. VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
  8995. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  8996. : m_instance( *instance )
  8997. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  8998. , m_dispatcher( instance.getDispatcher() )
  8999. {
  9000. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9001. instance.getDispatcher()->vkCreateHeadlessSurfaceEXT( static_cast<VkInstance>( *instance ),
  9002. reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
  9003. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9004. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9005. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9006. {
  9007. detail::throwResultException( result, "vkCreateHeadlessSurfaceEXT" );
  9008. }
  9009. }
  9010. # if defined( VK_USE_PLATFORM_IOS_MVK )
  9011. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9012. VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
  9013. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9014. : m_instance( *instance )
  9015. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9016. , m_dispatcher( instance.getDispatcher() )
  9017. {
  9018. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9019. instance.getDispatcher()->vkCreateIOSSurfaceMVK( static_cast<VkInstance>( *instance ),
  9020. reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
  9021. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9022. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9023. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9024. {
  9025. detail::throwResultException( result, "vkCreateIOSSurfaceMVK" );
  9026. }
  9027. }
  9028. # endif /*VK_USE_PLATFORM_IOS_MVK*/
  9029. # if defined( VK_USE_PLATFORM_FUCHSIA )
  9030. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9031. VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
  9032. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9033. : m_instance( *instance )
  9034. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9035. , m_dispatcher( instance.getDispatcher() )
  9036. {
  9037. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9038. instance.getDispatcher()->vkCreateImagePipeSurfaceFUCHSIA( static_cast<VkInstance>( *instance ),
  9039. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
  9040. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9041. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9042. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9043. {
  9044. detail::throwResultException( result, "vkCreateImagePipeSurfaceFUCHSIA" );
  9045. }
  9046. }
  9047. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  9048. # if defined( VK_USE_PLATFORM_MACOS_MVK )
  9049. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9050. VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
  9051. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9052. : m_instance( *instance )
  9053. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9054. , m_dispatcher( instance.getDispatcher() )
  9055. {
  9056. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9057. instance.getDispatcher()->vkCreateMacOSSurfaceMVK( static_cast<VkInstance>( *instance ),
  9058. reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
  9059. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9060. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9061. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9062. {
  9063. detail::throwResultException( result, "vkCreateMacOSSurfaceMVK" );
  9064. }
  9065. }
  9066. # endif /*VK_USE_PLATFORM_MACOS_MVK*/
  9067. # if defined( VK_USE_PLATFORM_METAL_EXT )
  9068. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9069. VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
  9070. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9071. : m_instance( *instance )
  9072. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9073. , m_dispatcher( instance.getDispatcher() )
  9074. {
  9075. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9076. instance.getDispatcher()->vkCreateMetalSurfaceEXT( static_cast<VkInstance>( *instance ),
  9077. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
  9078. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9079. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9080. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9081. {
  9082. detail::throwResultException( result, "vkCreateMetalSurfaceEXT" );
  9083. }
  9084. }
  9085. # endif /*VK_USE_PLATFORM_METAL_EXT*/
  9086. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  9087. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9088. VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
  9089. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9090. : m_instance( *instance )
  9091. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9092. , m_dispatcher( instance.getDispatcher() )
  9093. {
  9094. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9095. instance.getDispatcher()->vkCreateScreenSurfaceQNX( static_cast<VkInstance>( *instance ),
  9096. reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
  9097. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9098. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9099. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9100. {
  9101. detail::throwResultException( result, "vkCreateScreenSurfaceQNX" );
  9102. }
  9103. }
  9104. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  9105. # if defined( VK_USE_PLATFORM_GGP )
  9106. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9107. VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
  9108. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9109. : m_instance( *instance )
  9110. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9111. , m_dispatcher( instance.getDispatcher() )
  9112. {
  9113. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9114. instance.getDispatcher()->vkCreateStreamDescriptorSurfaceGGP( static_cast<VkInstance>( *instance ),
  9115. reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
  9116. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9117. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9118. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9119. {
  9120. detail::throwResultException( result, "vkCreateStreamDescriptorSurfaceGGP" );
  9121. }
  9122. }
  9123. # endif /*VK_USE_PLATFORM_GGP*/
  9124. # if defined( VK_USE_PLATFORM_VI_NN )
  9125. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9126. VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
  9127. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9128. : m_instance( *instance )
  9129. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9130. , m_dispatcher( instance.getDispatcher() )
  9131. {
  9132. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9133. instance.getDispatcher()->vkCreateViSurfaceNN( static_cast<VkInstance>( *instance ),
  9134. reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
  9135. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9136. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9137. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9138. {
  9139. detail::throwResultException( result, "vkCreateViSurfaceNN" );
  9140. }
  9141. }
  9142. # endif /*VK_USE_PLATFORM_VI_NN*/
  9143. # if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  9144. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9145. VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
  9146. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9147. : m_instance( *instance )
  9148. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9149. , m_dispatcher( instance.getDispatcher() )
  9150. {
  9151. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9152. instance.getDispatcher()->vkCreateWaylandSurfaceKHR( static_cast<VkInstance>( *instance ),
  9153. reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
  9154. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9155. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9156. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9157. {
  9158. detail::throwResultException( result, "vkCreateWaylandSurfaceKHR" );
  9159. }
  9160. }
  9161. # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  9162. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  9163. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9164. VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
  9165. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9166. : m_instance( *instance )
  9167. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9168. , m_dispatcher( instance.getDispatcher() )
  9169. {
  9170. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9171. instance.getDispatcher()->vkCreateWin32SurfaceKHR( static_cast<VkInstance>( *instance ),
  9172. reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
  9173. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9174. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9175. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9176. {
  9177. detail::throwResultException( result, "vkCreateWin32SurfaceKHR" );
  9178. }
  9179. }
  9180. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  9181. # if defined( VK_USE_PLATFORM_XCB_KHR )
  9182. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9183. VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
  9184. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9185. : m_instance( *instance )
  9186. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9187. , m_dispatcher( instance.getDispatcher() )
  9188. {
  9189. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9190. instance.getDispatcher()->vkCreateXcbSurfaceKHR( static_cast<VkInstance>( *instance ),
  9191. reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
  9192. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9193. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9194. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9195. {
  9196. detail::throwResultException( result, "vkCreateXcbSurfaceKHR" );
  9197. }
  9198. }
  9199. # endif /*VK_USE_PLATFORM_XCB_KHR*/
  9200. # if defined( VK_USE_PLATFORM_XLIB_KHR )
  9201. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9202. VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
  9203. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9204. : m_instance( *instance )
  9205. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9206. , m_dispatcher( instance.getDispatcher() )
  9207. {
  9208. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9209. instance.getDispatcher()->vkCreateXlibSurfaceKHR( static_cast<VkInstance>( *instance ),
  9210. reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
  9211. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9212. reinterpret_cast<VkSurfaceKHR *>( &m_surface ) ) );
  9213. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9214. {
  9215. detail::throwResultException( result, "vkCreateXlibSurfaceKHR" );
  9216. }
  9217. }
  9218. # endif /*VK_USE_PLATFORM_XLIB_KHR*/
  9219. SurfaceKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Instance const & instance,
  9220. VkSurfaceKHR surface,
  9221. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9222. : m_instance( *instance )
  9223. , m_surface( surface )
  9224. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9225. , m_dispatcher( instance.getDispatcher() )
  9226. {
  9227. }
  9228. SurfaceKHR( std::nullptr_t ) {}
  9229. ~SurfaceKHR()
  9230. {
  9231. clear();
  9232. }
  9233. SurfaceKHR() = delete;
  9234. SurfaceKHR( SurfaceKHR const & ) = delete;
  9235. SurfaceKHR( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9236. : m_instance( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_instance, {} ) )
  9237. , m_surface( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_surface, {} ) )
  9238. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  9239. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  9240. {
  9241. }
  9242. SurfaceKHR & operator=( SurfaceKHR const & ) = delete;
  9243. SurfaceKHR & operator =( SurfaceKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9244. {
  9245. if ( this != &rhs )
  9246. {
  9247. std::swap( m_instance, rhs.m_instance );
  9248. std::swap( m_surface, rhs.m_surface );
  9249. std::swap( m_allocator, rhs.m_allocator );
  9250. std::swap( m_dispatcher, rhs.m_dispatcher );
  9251. }
  9252. return *this;
  9253. }
  9254. VULKAN_HPP_NAMESPACE::SurfaceKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  9255. {
  9256. return m_surface;
  9257. }
  9258. void clear() VULKAN_HPP_NOEXCEPT
  9259. {
  9260. if ( m_surface )
  9261. {
  9262. getDispatcher()->vkDestroySurfaceKHR(
  9263. static_cast<VkInstance>( m_instance ), static_cast<VkSurfaceKHR>( m_surface ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  9264. }
  9265. m_instance = nullptr;
  9266. m_surface = nullptr;
  9267. m_allocator = nullptr;
  9268. m_dispatcher = nullptr;
  9269. }
  9270. VULKAN_HPP_NAMESPACE::SurfaceKHR release()
  9271. {
  9272. m_instance = nullptr;
  9273. m_allocator = nullptr;
  9274. m_dispatcher = nullptr;
  9275. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_surface, nullptr );
  9276. }
  9277. VULKAN_HPP_NAMESPACE::Instance getInstance() const
  9278. {
  9279. return m_instance;
  9280. }
  9281. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * getDispatcher() const
  9282. {
  9283. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  9284. return m_dispatcher;
  9285. }
  9286. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR & rhs ) VULKAN_HPP_NOEXCEPT
  9287. {
  9288. std::swap( m_instance, rhs.m_instance );
  9289. std::swap( m_surface, rhs.m_surface );
  9290. std::swap( m_allocator, rhs.m_allocator );
  9291. std::swap( m_dispatcher, rhs.m_dispatcher );
  9292. }
  9293. private:
  9294. VULKAN_HPP_NAMESPACE::Instance m_instance = {};
  9295. VULKAN_HPP_NAMESPACE::SurfaceKHR m_surface = {};
  9296. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  9297. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher const * m_dispatcher = nullptr;
  9298. };
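// Illustrative usage sketch (not part of the generated header): SurfaceKHR offers one constructor per
// platform create-info type, each guarded by its VK_USE_PLATFORM_* macro, plus the unconditional
// display-plane and headless variants. Placeholder names; assumes a valid vk::raii::Instance `instance`
// with VK_EXT_headless_surface enabled.
//
//   vk::raii::SurfaceKHR surface( instance, vk::HeadlessSurfaceCreateInfoEXT{} );
//   // On Win32 the same pattern applies with vk::Win32SurfaceCreateInfoKHR under
//   // VK_USE_PLATFORM_WIN32_KHR; destruction always goes through vkDestroySurfaceKHR.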
  9299. class SwapchainKHR
  9300. {
  9301. public:
  9302. using CType = VkSwapchainKHR;
  9303. using CppType = vk::SwapchainKHR;
  9304. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
  9305. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  9306. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
  9307. public:
  9308. SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9309. VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
  9310. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9311. : m_device( *device )
  9312. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9313. , m_dispatcher( device.getDispatcher() )
  9314. {
  9315. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9316. device.getDispatcher()->vkCreateSwapchainKHR( static_cast<VkDevice>( *device ),
  9317. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
  9318. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9319. reinterpret_cast<VkSwapchainKHR *>( &m_swapchain ) ) );
  9320. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9321. {
  9322. detail::throwResultException( result, "vkCreateSwapchainKHR" );
  9323. }
  9324. }
  9325. SwapchainKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9326. VkSwapchainKHR swapchain,
  9327. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9328. : m_device( *device )
  9329. , m_swapchain( swapchain )
  9330. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9331. , m_dispatcher( device.getDispatcher() )
  9332. {
  9333. }
  9334. SwapchainKHR( std::nullptr_t ) {}
  9335. ~SwapchainKHR()
  9336. {
  9337. clear();
  9338. }
  9339. SwapchainKHR() = delete;
  9340. SwapchainKHR( SwapchainKHR const & ) = delete;
  9341. SwapchainKHR( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9342. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  9343. , m_swapchain( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_swapchain, {} ) )
  9344. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  9345. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  9346. {
  9347. }
  9348. SwapchainKHR & operator=( SwapchainKHR const & ) = delete;
  9349. SwapchainKHR & operator =( SwapchainKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9350. {
  9351. if ( this != &rhs )
  9352. {
  9353. std::swap( m_device, rhs.m_device );
  9354. std::swap( m_swapchain, rhs.m_swapchain );
  9355. std::swap( m_allocator, rhs.m_allocator );
  9356. std::swap( m_dispatcher, rhs.m_dispatcher );
  9357. }
  9358. return *this;
  9359. }
  9360. VULKAN_HPP_NAMESPACE::SwapchainKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  9361. {
  9362. return m_swapchain;
  9363. }
  9364. void clear() VULKAN_HPP_NOEXCEPT
  9365. {
  9366. if ( m_swapchain )
  9367. {
  9368. getDispatcher()->vkDestroySwapchainKHR(
  9369. static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  9370. }
  9371. m_device = nullptr;
  9372. m_swapchain = nullptr;
  9373. m_allocator = nullptr;
  9374. m_dispatcher = nullptr;
  9375. }
  9376. VULKAN_HPP_NAMESPACE::SwapchainKHR release()
  9377. {
  9378. m_device = nullptr;
  9379. m_allocator = nullptr;
  9380. m_dispatcher = nullptr;
  9381. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_swapchain, nullptr );
  9382. }
  9383. VULKAN_HPP_NAMESPACE::Device getDevice() const
  9384. {
  9385. return m_device;
  9386. }
  9387. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  9388. {
  9389. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  9390. return m_dispatcher;
  9391. }
  9392. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR & rhs ) VULKAN_HPP_NOEXCEPT
  9393. {
  9394. std::swap( m_device, rhs.m_device );
  9395. std::swap( m_swapchain, rhs.m_swapchain );
  9396. std::swap( m_allocator, rhs.m_allocator );
  9397. std::swap( m_dispatcher, rhs.m_dispatcher );
  9398. }
  9399. //=== VK_KHR_swapchain ===
  9400. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::Image> getImages() const;
  9401. VULKAN_HPP_NODISCARD std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
  9402. acquireNextImage( uint64_t timeout,
  9403. VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT,
  9404. VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT ) const;
  9405. //=== VK_EXT_display_control ===
  9406. VULKAN_HPP_NODISCARD uint64_t getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const;
  9407. //=== VK_GOOGLE_display_timing ===
  9408. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE getRefreshCycleDurationGOOGLE() const;
  9409. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> getPastPresentationTimingGOOGLE() const;
  9410. //=== VK_KHR_shared_presentable_image ===
  9411. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result getStatus() const;
  9412. //=== VK_AMD_display_native_hdr ===
  9413. void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT;
  9414. //=== VK_KHR_present_wait ===
  9415. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Result waitForPresent( uint64_t presentId, uint64_t timeout ) const;
  9416. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  9417. //=== VK_EXT_full_screen_exclusive ===
  9418. void acquireFullScreenExclusiveModeEXT() const;
  9419. void releaseFullScreenExclusiveModeEXT() const;
  9420. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  9421. //=== VK_NV_low_latency2 ===
  9422. void setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const;
  9423. void latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const;
  9424. void setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT;
  9425. VULKAN_HPP_NODISCARD std::pair<uint32_t, VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV> getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT;
  9426. private:
  9427. VULKAN_HPP_NAMESPACE::Device m_device = {};
  9428. VULKAN_HPP_NAMESPACE::SwapchainKHR m_swapchain = {};
  9429. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  9430. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  9431. };
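// Illustrative usage sketch (not part of the generated header): typical per-frame use of the swapchain
// wrapper. Placeholder names; assumes `swapchainCreateInfo` is a filled-in vk::SwapchainCreateInfoKHR
// and `imageAvailable` is a vk::raii::Semaphore.
//
//   vk::raii::SwapchainKHR swapchain( device, swapchainCreateInfo );
//   std::vector<vk::Image> images = swapchain.getImages();
//   auto [result, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable );
//   // `result` may be e.g. eSuboptimalKHR and should be checked before presenting.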
  9432. class SwapchainKHRs : public std::vector<VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
  9433. {
  9434. public:
  9435. SwapchainKHRs( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9436. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
  9437. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9438. {
  9439. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * dispatcher = device.getDispatcher();
  9440. std::vector<VkSwapchainKHR> swapchains( createInfos.size() );
  9441. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>( dispatcher->vkCreateSharedSwapchainsKHR(
  9442. static_cast<VkDevice>( *device ),
  9443. createInfos.size(),
  9444. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
  9445. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  9446. swapchains.data() ) );
  9447. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9448. {
  9449. this->reserve( createInfos.size() );
  9450. for ( auto const & swapchainKHR : swapchains )
  9451. {
  9452. this->emplace_back( device, swapchainKHR, allocator );
  9453. }
  9454. }
  9455. else
  9456. {
  9457. detail::throwResultException( result, "vkCreateSharedSwapchainsKHR" );
  9458. }
  9459. }
  9460. SwapchainKHRs( std::nullptr_t ) {}
  9461. SwapchainKHRs() = delete;
  9462. SwapchainKHRs( SwapchainKHRs const & ) = delete;
  9463. SwapchainKHRs( SwapchainKHRs && rhs ) = default;
  9464. SwapchainKHRs & operator=( SwapchainKHRs const & ) = delete;
  9465. SwapchainKHRs & operator=( SwapchainKHRs && rhs ) = default;
  9466. };
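// Illustrative usage sketch (not part of the generated header): unlike SwapchainKHR above, the
// SwapchainKHRs container wraps vkCreateSharedSwapchainsKHR (VK_KHR_display_swapchain), creating one
// shared swapchain per element of `createInfos`. Placeholder names; assumes the extension is enabled.
//
//   vk::raii::SwapchainKHRs sharedSwapchains( device, createInfos );
//   for ( auto const & sc : sharedSwapchains ) { /* each `sc` is a vk::raii::SwapchainKHR */ }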
  9467. class ValidationCacheEXT
  9468. {
  9469. public:
  9470. using CType = VkValidationCacheEXT;
  9471. using CppType = vk::ValidationCacheEXT;
  9472. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
  9473. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  9474. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
  9475. public:
  9476. ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9477. VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
  9478. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9479. : m_device( *device )
  9480. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9481. , m_dispatcher( device.getDispatcher() )
  9482. {
  9483. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9484. device.getDispatcher()->vkCreateValidationCacheEXT( static_cast<VkDevice>( *device ),
  9485. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
  9486. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9487. reinterpret_cast<VkValidationCacheEXT *>( &m_validationCache ) ) );
  9488. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9489. {
  9490. detail::throwResultException( result, "vkCreateValidationCacheEXT" );
  9491. }
  9492. }
  9493. ValidationCacheEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9494. VkValidationCacheEXT validationCache,
  9495. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9496. : m_device( *device )
  9497. , m_validationCache( validationCache )
  9498. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9499. , m_dispatcher( device.getDispatcher() )
  9500. {
  9501. }
  9502. ValidationCacheEXT( std::nullptr_t ) {}
  9503. ~ValidationCacheEXT()
  9504. {
  9505. clear();
  9506. }
  9507. ValidationCacheEXT() = delete;
  9508. ValidationCacheEXT( ValidationCacheEXT const & ) = delete;
  9509. ValidationCacheEXT( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
  9510. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  9511. , m_validationCache( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_validationCache, {} ) )
  9512. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  9513. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  9514. {
  9515. }
  9516. ValidationCacheEXT & operator=( ValidationCacheEXT const & ) = delete;
  9517. ValidationCacheEXT & operator =( ValidationCacheEXT && rhs ) VULKAN_HPP_NOEXCEPT
  9518. {
  9519. if ( this != &rhs )
  9520. {
  9521. std::swap( m_device, rhs.m_device );
  9522. std::swap( m_validationCache, rhs.m_validationCache );
  9523. std::swap( m_allocator, rhs.m_allocator );
  9524. std::swap( m_dispatcher, rhs.m_dispatcher );
  9525. }
  9526. return *this;
  9527. }
  9528. VULKAN_HPP_NAMESPACE::ValidationCacheEXT const & operator*() const VULKAN_HPP_NOEXCEPT
  9529. {
  9530. return m_validationCache;
  9531. }
  9532. void clear() VULKAN_HPP_NOEXCEPT
  9533. {
  9534. if ( m_validationCache )
  9535. {
  9536. getDispatcher()->vkDestroyValidationCacheEXT( static_cast<VkDevice>( m_device ),
  9537. static_cast<VkValidationCacheEXT>( m_validationCache ),
  9538. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  9539. }
  9540. m_device = nullptr;
  9541. m_validationCache = nullptr;
  9542. m_allocator = nullptr;
  9543. m_dispatcher = nullptr;
  9544. }
  9545. VULKAN_HPP_NAMESPACE::ValidationCacheEXT release()
  9546. {
  9547. m_device = nullptr;
  9548. m_allocator = nullptr;
  9549. m_dispatcher = nullptr;
  9550. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_validationCache, nullptr );
  9551. }
  9552. VULKAN_HPP_NAMESPACE::Device getDevice() const
  9553. {
  9554. return m_device;
  9555. }
  9556. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  9557. {
  9558. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  9559. return m_dispatcher;
  9560. }
  9561. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT & rhs ) VULKAN_HPP_NOEXCEPT
  9562. {
  9563. std::swap( m_device, rhs.m_device );
  9564. std::swap( m_validationCache, rhs.m_validationCache );
  9565. std::swap( m_allocator, rhs.m_allocator );
  9566. std::swap( m_dispatcher, rhs.m_dispatcher );
  9567. }
  9568. //=== VK_EXT_validation_cache ===
  9569. void merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const;
  9570. VULKAN_HPP_NODISCARD std::vector<uint8_t> getData() const;
  9571. private:
  9572. VULKAN_HPP_NAMESPACE::Device m_device = {};
  9573. VULKAN_HPP_NAMESPACE::ValidationCacheEXT m_validationCache = {};
  9574. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  9575. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  9576. };
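// Usage sketch (illustrative only; assumes the vk / vk::raii aliases, a vk::raii::Device `device`
// with VK_EXT_validation_cache enabled, and placeholder names `savedBlob` / `otherCache`):
//
//   vk::ValidationCacheCreateInfoEXT createInfo( {}, savedBlob.size(), savedBlob.data() );
//   vk::raii::ValidationCacheEXT     cache( device, createInfo );
//   cache.merge( { *otherCache } );                // fold another cache into this one
//   std::vector<uint8_t> blob = cache.getData();   // serialize for reuse in a later run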
  9577. class VideoSessionKHR
  9578. {
  9579. public:
  9580. using CType = VkVideoSessionKHR;
  9581. using CppType = vk::VideoSessionKHR;
  9582. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionKHR;
  9583. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  9584. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  9585. public:
  9586. VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9587. VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
  9588. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9589. : m_device( *device )
  9590. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9591. , m_dispatcher( device.getDispatcher() )
  9592. {
  9593. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9594. device.getDispatcher()->vkCreateVideoSessionKHR( static_cast<VkDevice>( *device ),
  9595. reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
  9596. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9597. reinterpret_cast<VkVideoSessionKHR *>( &m_videoSession ) ) );
  9598. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9599. {
  9600. detail::throwResultException( result, "vkCreateVideoSessionKHR" );
  9601. }
  9602. }
  9603. VideoSessionKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9604. VkVideoSessionKHR videoSession,
  9605. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9606. : m_device( *device )
  9607. , m_videoSession( videoSession )
  9608. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9609. , m_dispatcher( device.getDispatcher() )
  9610. {
  9611. }
  9612. VideoSessionKHR( std::nullptr_t ) {}
  9613. ~VideoSessionKHR()
  9614. {
  9615. clear();
  9616. }
  9617. VideoSessionKHR() = delete;
  9618. VideoSessionKHR( VideoSessionKHR const & ) = delete;
  9619. VideoSessionKHR( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9620. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  9621. , m_videoSession( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSession, {} ) )
  9622. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  9623. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  9624. {
  9625. }
  9626. VideoSessionKHR & operator=( VideoSessionKHR const & ) = delete;
  9627. VideoSessionKHR & operator =( VideoSessionKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9628. {
  9629. if ( this != &rhs )
  9630. {
  9631. std::swap( m_device, rhs.m_device );
  9632. std::swap( m_videoSession, rhs.m_videoSession );
  9633. std::swap( m_allocator, rhs.m_allocator );
  9634. std::swap( m_dispatcher, rhs.m_dispatcher );
  9635. }
  9636. return *this;
  9637. }
  9638. VULKAN_HPP_NAMESPACE::VideoSessionKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  9639. {
  9640. return m_videoSession;
  9641. }
  9642. void clear() VULKAN_HPP_NOEXCEPT
  9643. {
  9644. if ( m_videoSession )
  9645. {
  9646. getDispatcher()->vkDestroyVideoSessionKHR( static_cast<VkDevice>( m_device ),
  9647. static_cast<VkVideoSessionKHR>( m_videoSession ),
  9648. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  9649. }
  9650. m_device = nullptr;
  9651. m_videoSession = nullptr;
  9652. m_allocator = nullptr;
  9653. m_dispatcher = nullptr;
  9654. }
  9655. VULKAN_HPP_NAMESPACE::VideoSessionKHR release()
  9656. {
  9657. m_device = nullptr;
  9658. m_allocator = nullptr;
  9659. m_dispatcher = nullptr;
  9660. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSession, nullptr );
  9661. }
  9662. VULKAN_HPP_NAMESPACE::Device getDevice() const
  9663. {
  9664. return m_device;
  9665. }
  9666. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  9667. {
  9668. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  9669. return m_dispatcher;
  9670. }
  9671. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR & rhs ) VULKAN_HPP_NOEXCEPT
  9672. {
  9673. std::swap( m_device, rhs.m_device );
  9674. std::swap( m_videoSession, rhs.m_videoSession );
  9675. std::swap( m_allocator, rhs.m_allocator );
  9676. std::swap( m_dispatcher, rhs.m_dispatcher );
  9677. }
  9678. //=== VK_KHR_video_queue ===
  9679. VULKAN_HPP_NODISCARD std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> getMemoryRequirements() const;
  9680. void bindMemory( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const;
  9681. private:
  9682. VULKAN_HPP_NAMESPACE::Device m_device = {};
  9683. VULKAN_HPP_NAMESPACE::VideoSessionKHR m_videoSession = {};
  9684. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  9685. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  9686. };
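// Usage sketch (illustrative only; assumes the vk / vk::raii aliases, a vk::raii::Device `device`
// with VK_KHR_video_queue enabled, and a filled-in vk::VideoSessionCreateInfoKHR `createInfo`):
//
//   vk::raii::VideoSessionKHR videoSession( device, createInfo );
//   std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements = videoSession.getMemoryRequirements();
//   // allocate device memory matching each requirement, fill BindVideoSessionMemoryInfoKHR entries, then:
//   videoSession.bindMemory( bindSessionMemoryInfos );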
  9687. class VideoSessionParametersKHR
  9688. {
  9689. public:
  9690. using CType = VkVideoSessionParametersKHR;
  9691. using CppType = vk::VideoSessionParametersKHR;
  9692. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eVideoSessionParametersKHR;
  9693. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType =
  9694. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  9695. public:
  9696. VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9697. VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
  9698. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9699. : m_device( *device )
  9700. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9701. , m_dispatcher( device.getDispatcher() )
  9702. {
  9703. VULKAN_HPP_NAMESPACE::Result result = static_cast<VULKAN_HPP_NAMESPACE::Result>(
  9704. device.getDispatcher()->vkCreateVideoSessionParametersKHR( static_cast<VkDevice>( *device ),
  9705. reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
  9706. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ),
  9707. reinterpret_cast<VkVideoSessionParametersKHR *>( &m_videoSessionParameters ) ) );
  9708. if ( result != VULKAN_HPP_NAMESPACE::Result::eSuccess )
  9709. {
  9710. detail::throwResultException( result, "vkCreateVideoSessionParametersKHR" );
  9711. }
  9712. }
  9713. VideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::Device const & device,
  9714. VkVideoSessionParametersKHR videoSessionParameters,
  9715. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator = nullptr )
  9716. : m_device( *device )
  9717. , m_videoSessionParameters( videoSessionParameters )
  9718. , m_allocator( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) )
  9719. , m_dispatcher( device.getDispatcher() )
  9720. {
  9721. }
  9722. VideoSessionParametersKHR( std::nullptr_t ) {}
  9723. ~VideoSessionParametersKHR()
  9724. {
  9725. clear();
  9726. }
  9727. VideoSessionParametersKHR() = delete;
  9728. VideoSessionParametersKHR( VideoSessionParametersKHR const & ) = delete;
  9729. VideoSessionParametersKHR( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9730. : m_device( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_device, {} ) )
  9731. , m_videoSessionParameters( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_videoSessionParameters, {} ) )
  9732. , m_allocator( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_allocator, {} ) )
  9733. , m_dispatcher( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( rhs.m_dispatcher, nullptr ) )
  9734. {
  9735. }
  9736. VideoSessionParametersKHR & operator=( VideoSessionParametersKHR const & ) = delete;
  9737. VideoSessionParametersKHR & operator =( VideoSessionParametersKHR && rhs ) VULKAN_HPP_NOEXCEPT
  9738. {
  9739. if ( this != &rhs )
  9740. {
  9741. std::swap( m_device, rhs.m_device );
  9742. std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
  9743. std::swap( m_allocator, rhs.m_allocator );
  9744. std::swap( m_dispatcher, rhs.m_dispatcher );
  9745. }
  9746. return *this;
  9747. }
  9748. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR const & operator*() const VULKAN_HPP_NOEXCEPT
  9749. {
  9750. return m_videoSessionParameters;
  9751. }
  9752. void clear() VULKAN_HPP_NOEXCEPT
  9753. {
  9754. if ( m_videoSessionParameters )
  9755. {
  9756. getDispatcher()->vkDestroyVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
  9757. static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
  9758. reinterpret_cast<const VkAllocationCallbacks *>( m_allocator ) );
  9759. }
  9760. m_device = nullptr;
  9761. m_videoSessionParameters = nullptr;
  9762. m_allocator = nullptr;
  9763. m_dispatcher = nullptr;
  9764. }
  9765. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR release()
  9766. {
  9767. m_device = nullptr;
  9768. m_allocator = nullptr;
  9769. m_dispatcher = nullptr;
  9770. return VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::exchange( m_videoSessionParameters, nullptr );
  9771. }
  9772. VULKAN_HPP_NAMESPACE::Device getDevice() const
  9773. {
  9774. return m_device;
  9775. }
  9776. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * getDispatcher() const
  9777. {
  9778. VULKAN_HPP_ASSERT( m_dispatcher->getVkHeaderVersion() == VK_HEADER_VERSION );
  9779. return m_dispatcher;
  9780. }
  9781. void swap( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR & rhs ) VULKAN_HPP_NOEXCEPT
  9782. {
  9783. std::swap( m_device, rhs.m_device );
  9784. std::swap( m_videoSessionParameters, rhs.m_videoSessionParameters );
  9785. std::swap( m_allocator, rhs.m_allocator );
  9786. std::swap( m_dispatcher, rhs.m_dispatcher );
  9787. }
  9788. //=== VK_KHR_video_queue ===
  9789. void update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const;
  9790. private:
  9791. VULKAN_HPP_NAMESPACE::Device m_device = {};
  9792. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR m_videoSessionParameters = {};
  9793. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * m_allocator = {};
  9794. VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher const * m_dispatcher = nullptr;
  9795. };
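// Usage sketch (illustrative only; `device`, `createInfo`, and `updateInfo` are assumed to be set
// up elsewhere, with VK_KHR_video_queue enabled on the device):
//
//   vk::raii::VideoSessionParametersKHR parameters( device, createInfo );
//   parameters.update( updateInfo );   // see VK_KHR_video_queue for the updateSequenceCount rules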
//===========================
//=== COMMAND Definitions ===
//===========================

//=== VK_VERSION_1_0 ===

VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Instance
  Context::createInstance( VULKAN_HPP_NAMESPACE::InstanceCreateInfo const &                                createInfo,
                           VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
  return VULKAN_HPP_RAII_NAMESPACE::Instance( *this, createInfo, allocator );
}

VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice> Instance::enumeratePhysicalDevices() const
{
  return VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices( *this );
}
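// Usage sketch (illustrative only; assumes the default vk / vk::raii namespace aliases):
//
//   vk::raii::Context       context;
//   vk::ApplicationInfo     appInfo( "Sample", 1, "NoEngine", 1, VK_API_VERSION_1_1 );
//   vk::InstanceCreateInfo  instanceCreateInfo( {}, &appInfo );
//   vk::raii::Instance      instance = context.createInstance( instanceCreateInfo );
//   std::vector<vk::raii::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();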
  9810. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures() const VULKAN_HPP_NOEXCEPT
  9811. {
  9812. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures && "Function <vkGetPhysicalDeviceFeatures> requires <VK_VERSION_1_0>" );
  9813. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
  9814. getDispatcher()->vkGetPhysicalDeviceFeatures( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  9815. reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
  9816. return features;
  9817. }
  9818. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
  9819. PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
  9820. {
  9821. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties && "Function <vkGetPhysicalDeviceFormatProperties> requires <VK_VERSION_1_0>" );
  9822. VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
  9823. getDispatcher()->vkGetPhysicalDeviceFormatProperties(
  9824. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
  9825. return formatProperties;
  9826. }
  9827. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties
  9828. PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  9829. VULKAN_HPP_NAMESPACE::ImageType type,
  9830. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  9831. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  9832. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags ) const
  9833. {
  9834. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties &&
  9835. "Function <vkGetPhysicalDeviceImageFormatProperties> requires <VK_VERSION_1_0>" );
  9836. VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
  9837. VkResult result = getDispatcher()->vkGetPhysicalDeviceImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  9838. static_cast<VkFormat>( format ),
  9839. static_cast<VkImageType>( type ),
  9840. static_cast<VkImageTiling>( tiling ),
  9841. static_cast<VkImageUsageFlags>( usage ),
  9842. static_cast<VkImageCreateFlags>( flags ),
  9843. reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
  9844. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
  9845. return imageFormatProperties;
  9846. }
  9847. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties() const VULKAN_HPP_NOEXCEPT
  9848. {
  9849. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties && "Function <vkGetPhysicalDeviceProperties> requires <VK_VERSION_1_0>" );
  9850. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
  9851. getDispatcher()->vkGetPhysicalDeviceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  9852. reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
  9853. return properties;
  9854. }
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> PhysicalDevice::getQueueFamilyProperties() const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties &&
                     "Function <vkGetPhysicalDeviceQueueFamilyProperties> requires <VK_VERSION_1_0>" );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> queueFamilyProperties;
  uint32_t                                                 queueFamilyPropertyCount;
  getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
                                                             &queueFamilyPropertyCount,
                                                             reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    queueFamilyProperties.resize( queueFamilyPropertyCount );
  }
  return queueFamilyProperties;
}
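// Usage sketch (illustrative only): picking the first queue family that supports graphics work,
// given a vk::raii::PhysicalDevice `physicalDevice` (vk / vk::raii aliases assumed).
//
//   std::vector<vk::QueueFamilyProperties> queueFamilies = physicalDevice.getQueueFamilyProperties();
//   uint32_t graphicsQueueFamilyIndex = 0;
//   for ( ; graphicsQueueFamilyIndex < static_cast<uint32_t>( queueFamilies.size() ); ++graphicsQueueFamilyIndex )
//   {
//     if ( queueFamilies[graphicsQueueFamilyIndex].queueFlags & vk::QueueFlagBits::eGraphics )
//       break;
//   }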
  9873. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties() const VULKAN_HPP_NOEXCEPT
  9874. {
  9875. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties && "Function <vkGetPhysicalDeviceMemoryProperties> requires <VK_VERSION_1_0>" );
  9876. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
  9877. getDispatcher()->vkGetPhysicalDeviceMemoryProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  9878. reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
  9879. return memoryProperties;
  9880. }
  9881. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
  9882. {
  9883. VULKAN_HPP_ASSERT( getDispatcher()->vkGetInstanceProcAddr && "Function <vkGetInstanceProcAddr> requires <VK_VERSION_1_0>" );
  9884. PFN_vkVoidFunction result = getDispatcher()->vkGetInstanceProcAddr( static_cast<VkInstance>( m_instance ), name.c_str() );
  9885. return result;
  9886. }
  9887. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name ) const VULKAN_HPP_NOEXCEPT
  9888. {
  9889. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceProcAddr && "Function <vkGetDeviceProcAddr> requires <VK_VERSION_1_0>" );
  9890. PFN_vkVoidFunction result = getDispatcher()->vkGetDeviceProcAddr( static_cast<VkDevice>( m_device ), name.c_str() );
  9891. return result;
  9892. }
  9893. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Device
  9894. PhysicalDevice::createDevice( VULKAN_HPP_NAMESPACE::DeviceCreateInfo const & createInfo,
  9895. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  9896. {
  9897. return VULKAN_HPP_RAII_NAMESPACE::Device( *this, createInfo, allocator );
  9898. }
  9899. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
  9900. Context::enumerateInstanceExtensionProperties( Optional<const std::string> layerName ) const
  9901. {
  9902. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceExtensionProperties &&
  9903. "Function <vkEnumerateInstanceExtensionProperties> requires <VK_VERSION_1_0>" );
  9904. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
  9905. uint32_t propertyCount;
  9906. VkResult result;
  9907. do
  9908. {
  9909. result = getDispatcher()->vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  9910. if ( ( result == VK_SUCCESS ) && propertyCount )
  9911. {
  9912. properties.resize( propertyCount );
  9913. result = getDispatcher()->vkEnumerateInstanceExtensionProperties(
  9914. layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  9915. }
  9916. } while ( result == VK_INCOMPLETE );
  9917. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceExtensionProperties" );
  9918. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9919. if ( propertyCount < properties.size() )
  9920. {
  9921. properties.resize( propertyCount );
  9922. }
  9923. return properties;
  9924. }
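// Usage sketch (illustrative only): checking whether an instance extension is available before
// enabling it. Assumes the vk / vk::raii aliases and the usual <algorithm> / <string> includes.
//
//   vk::raii::Context context;
//   std::vector<vk::ExtensionProperties> extensions = context.enumerateInstanceExtensionProperties( nullptr );
//   bool hasDebugUtils = std::any_of( extensions.begin(), extensions.end(),
//                                     []( vk::ExtensionProperties const & ep )
//                                     { return std::string( ep.extensionName.data() ) == VK_EXT_DEBUG_UTILS_EXTENSION_NAME; } );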
  9925. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties>
  9926. PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName ) const
  9927. {
  9928. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceExtensionProperties && "Function <vkEnumerateDeviceExtensionProperties> requires <VK_VERSION_1_0>" );
  9929. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties> properties;
  9930. uint32_t propertyCount;
  9931. VkResult result;
  9932. do
  9933. {
  9934. result = getDispatcher()->vkEnumerateDeviceExtensionProperties(
  9935. static_cast<VkPhysicalDevice>( m_physicalDevice ), layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  9936. if ( ( result == VK_SUCCESS ) && propertyCount )
  9937. {
  9938. properties.resize( propertyCount );
  9939. result = getDispatcher()->vkEnumerateDeviceExtensionProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  9940. layerName ? layerName->c_str() : nullptr,
  9941. &propertyCount,
  9942. reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  9943. }
  9944. } while ( result == VK_INCOMPLETE );
  9945. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  9946. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9947. if ( propertyCount < properties.size() )
  9948. {
  9949. properties.resize( propertyCount );
  9950. }
  9951. return properties;
  9952. }
  9953. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> Context::enumerateInstanceLayerProperties() const
  9954. {
  9955. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceLayerProperties && "Function <vkEnumerateInstanceLayerProperties> requires <VK_VERSION_1_0>" );
  9956. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
  9957. uint32_t propertyCount;
  9958. VkResult result;
  9959. do
  9960. {
  9961. result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
  9962. if ( ( result == VK_SUCCESS ) && propertyCount )
  9963. {
  9964. properties.resize( propertyCount );
  9965. result = getDispatcher()->vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  9966. }
  9967. } while ( result == VK_INCOMPLETE );
  9968. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceLayerProperties" );
  9969. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9970. if ( propertyCount < properties.size() )
  9971. {
  9972. properties.resize( propertyCount );
  9973. }
  9974. return properties;
  9975. }
  9976. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> PhysicalDevice::enumerateDeviceLayerProperties() const
  9977. {
  9978. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateDeviceLayerProperties && "Function <vkEnumerateDeviceLayerProperties> requires <VK_VERSION_1_0>" );
  9979. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties> properties;
  9980. uint32_t propertyCount;
  9981. VkResult result;
  9982. do
  9983. {
  9984. result = getDispatcher()->vkEnumerateDeviceLayerProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
  9985. if ( ( result == VK_SUCCESS ) && propertyCount )
  9986. {
  9987. properties.resize( propertyCount );
  9988. result = getDispatcher()->vkEnumerateDeviceLayerProperties(
  9989. static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  9990. }
  9991. } while ( result == VK_INCOMPLETE );
  9992. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  9993. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9994. if ( propertyCount < properties.size() )
  9995. {
  9996. properties.resize( propertyCount );
  9997. }
  9998. return properties;
  9999. }
  10000. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex ) const
  10001. {
  10002. return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueFamilyIndex, queueIndex );
  10003. }
  10004. VULKAN_HPP_INLINE void Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits,
  10005. VULKAN_HPP_NAMESPACE::Fence fence ) const
  10006. {
  10007. VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit && "Function <vkQueueSubmit> requires <VK_VERSION_1_0>" );
  10008. VkResult result = getDispatcher()->vkQueueSubmit(
  10009. static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
  10010. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
  10011. }
  10012. VULKAN_HPP_INLINE void Queue::waitIdle() const
  10013. {
  10014. VULKAN_HPP_ASSERT( getDispatcher()->vkQueueWaitIdle && "Function <vkQueueWaitIdle> requires <VK_VERSION_1_0>" );
  10015. VkResult result = getDispatcher()->vkQueueWaitIdle( static_cast<VkQueue>( m_queue ) );
  10016. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
  10017. }
  10018. VULKAN_HPP_INLINE void Device::waitIdle() const
  10019. {
  10020. VULKAN_HPP_ASSERT( getDispatcher()->vkDeviceWaitIdle && "Function <vkDeviceWaitIdle> requires <VK_VERSION_1_0>" );
  10021. VkResult result = getDispatcher()->vkDeviceWaitIdle( static_cast<VkDevice>( m_device ) );
  10022. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  10023. }
  10024. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeviceMemory
  10025. Device::allocateMemory( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const & allocateInfo,
  10026. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10027. {
  10028. return VULKAN_HPP_RAII_NAMESPACE::DeviceMemory( *this, allocateInfo, allocator );
  10029. }
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DeviceMemory::mapMemory( VULKAN_HPP_NAMESPACE::DeviceSize     offset,
                                                                       VULKAN_HPP_NAMESPACE::DeviceSize     size,
                                                                       VULKAN_HPP_NAMESPACE::MemoryMapFlags flags ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory && "Function <vkMapMemory> requires <VK_VERSION_1_0>" );
  void *   pData;
  VkResult result = getDispatcher()->vkMapMemory( static_cast<VkDevice>( m_device ),
                                                  static_cast<VkDeviceMemory>( m_memory ),
                                                  static_cast<VkDeviceSize>( offset ),
                                                  static_cast<VkDeviceSize>( size ),
                                                  static_cast<VkMemoryMapFlags>( flags ),
                                                  &pData );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::mapMemory" );
  return pData;
}

VULKAN_HPP_INLINE void DeviceMemory::unmapMemory() const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory && "Function <vkUnmapMemory> requires <VK_VERSION_1_0>" );
  getDispatcher()->vkUnmapMemory( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ) );
}
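// Usage sketch (illustrative only): uploading data through a host-visible allocation. Assumes the
// vk / vk::raii aliases, a vk::raii::Device `device`, a filled vk::MemoryAllocateInfo `allocInfo`
// targeting a HOST_VISIBLE memory type, and a host buffer `src` of `size` bytes.
//
//   vk::raii::DeviceMemory memory = device.allocateMemory( allocInfo );
//   void * mapped = memory.mapMemory( 0, size, vk::MemoryMapFlags() );
//   std::memcpy( mapped, src, static_cast<size_t>( size ) );
//   // only needed when the memory type is not HOST_COHERENT:
//   device.flushMappedMemoryRanges( vk::MappedMemoryRange( *memory, 0, VK_WHOLE_SIZE ) );
//   memory.unmapMemory();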
  10050. VULKAN_HPP_INLINE void
  10051. Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
  10052. {
  10053. VULKAN_HPP_ASSERT( getDispatcher()->vkFlushMappedMemoryRanges && "Function <vkFlushMappedMemoryRanges> requires <VK_VERSION_1_0>" );
  10054. VkResult result = getDispatcher()->vkFlushMappedMemoryRanges(
  10055. static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  10056. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  10057. }
  10058. VULKAN_HPP_INLINE void
  10059. Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges ) const
  10060. {
  10061. VULKAN_HPP_ASSERT( getDispatcher()->vkInvalidateMappedMemoryRanges && "Function <vkInvalidateMappedMemoryRanges> requires <VK_VERSION_1_0>" );
  10062. VkResult result = getDispatcher()->vkInvalidateMappedMemoryRanges(
  10063. static_cast<VkDevice>( m_device ), memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  10064. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
  10065. }
  10066. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DeviceMemory::getCommitment() const VULKAN_HPP_NOEXCEPT
  10067. {
  10068. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryCommitment && "Function <vkGetDeviceMemoryCommitment> requires <VK_VERSION_1_0>" );
  10069. VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
  10070. getDispatcher()->vkGetDeviceMemoryCommitment(
  10071. static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
  10072. return committedMemoryInBytes;
  10073. }
  10074. VULKAN_HPP_INLINE void Buffer::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
  10075. {
  10076. VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory && "Function <vkBindBufferMemory> requires <VK_VERSION_1_0>" );
  10077. VkResult result = getDispatcher()->vkBindBufferMemory( static_cast<VkDevice>( m_device ),
  10078. static_cast<VkBuffer>( m_buffer ),
  10079. static_cast<VkDeviceMemory>( memory ),
  10080. static_cast<VkDeviceSize>( memoryOffset ) );
  10081. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Buffer::bindMemory" );
  10082. }
  10083. VULKAN_HPP_INLINE void Image::bindMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset ) const
  10084. {
  10085. VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory && "Function <vkBindImageMemory> requires <VK_VERSION_1_0>" );
  10086. VkResult result = getDispatcher()->vkBindImageMemory(
  10087. static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
  10088. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::bindMemory" );
  10089. }
  10090. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Buffer::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
  10091. {
  10092. VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements && "Function <vkGetBufferMemoryRequirements> requires <VK_VERSION_1_0>" );
  10093. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  10094. getDispatcher()->vkGetBufferMemoryRequirements(
  10095. static_cast<VkDevice>( m_device ), static_cast<VkBuffer>( m_buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  10096. return memoryRequirements;
  10097. }
  10098. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Image::getMemoryRequirements() const VULKAN_HPP_NOEXCEPT
  10099. {
  10100. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements && "Function <vkGetImageMemoryRequirements> requires <VK_VERSION_1_0>" );
  10101. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  10102. getDispatcher()->vkGetImageMemoryRequirements(
  10103. static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  10104. return memoryRequirements;
  10105. }
  10106. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> Image::getSparseMemoryRequirements() const
  10107. {
  10108. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements && "Function <vkGetImageSparseMemoryRequirements> requires <VK_VERSION_1_0>" );
  10109. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements> sparseMemoryRequirements;
  10110. uint32_t sparseMemoryRequirementCount;
  10111. getDispatcher()->vkGetImageSparseMemoryRequirements(
  10112. static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), &sparseMemoryRequirementCount, nullptr );
  10113. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  10114. getDispatcher()->vkGetImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
  10115. static_cast<VkImage>( m_image ),
  10116. &sparseMemoryRequirementCount,
  10117. reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
  10118. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  10119. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  10120. {
  10121. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  10122. }
  10123. return sparseMemoryRequirements;
  10124. }
  10125. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>
  10126. PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  10127. VULKAN_HPP_NAMESPACE::ImageType type,
  10128. VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
  10129. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  10130. VULKAN_HPP_NAMESPACE::ImageTiling tiling ) const
  10131. {
  10132. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties &&
  10133. "Function <vkGetPhysicalDeviceSparseImageFormatProperties> requires <VK_VERSION_1_0>" );
  10134. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties> properties;
  10135. uint32_t propertyCount;
  10136. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  10137. static_cast<VkFormat>( format ),
  10138. static_cast<VkImageType>( type ),
  10139. static_cast<VkSampleCountFlagBits>( samples ),
  10140. static_cast<VkImageUsageFlags>( usage ),
  10141. static_cast<VkImageTiling>( tiling ),
  10142. &propertyCount,
  10143. nullptr );
  10144. properties.resize( propertyCount );
  10145. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  10146. static_cast<VkFormat>( format ),
  10147. static_cast<VkImageType>( type ),
  10148. static_cast<VkSampleCountFlagBits>( samples ),
  10149. static_cast<VkImageUsageFlags>( usage ),
  10150. static_cast<VkImageTiling>( tiling ),
  10151. &propertyCount,
  10152. reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
  10153. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  10154. if ( propertyCount < properties.size() )
  10155. {
  10156. properties.resize( propertyCount );
  10157. }
  10158. return properties;
  10159. }
  10160. VULKAN_HPP_INLINE void Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo,
  10161. VULKAN_HPP_NAMESPACE::Fence fence ) const
  10162. {
  10163. VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBindSparse && "Function <vkQueueBindSparse> requires <VK_VERSION_1_0>" );
  10164. VkResult result = getDispatcher()->vkQueueBindSparse(
  10165. static_cast<VkQueue>( m_queue ), bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
  10166. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
  10167. }
  10168. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
  10169. Device::createFence( VULKAN_HPP_NAMESPACE::FenceCreateInfo const & createInfo,
  10170. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10171. {
  10172. return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, createInfo, allocator );
  10173. }
VULKAN_HPP_INLINE void Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkResetFences && "Function <vkResetFences> requires <VK_VERSION_1_0>" );
  VkResult result = getDispatcher()->vkResetFences( static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
}

VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Fence::getStatus() const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceStatus && "Function <vkGetFenceStatus> requires <VK_VERSION_1_0>" );
  VkResult result = getDispatcher()->vkGetFenceStatus( static_cast<VkDevice>( m_device ), static_cast<VkFence>( m_fence ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Fence::getStatus",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}

VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences(
  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForFences && "Function <vkWaitForFences> requires <VK_VERSION_1_0>" );
  VkResult result = getDispatcher()->vkWaitForFences(
    static_cast<VkDevice>( m_device ), fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
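// Usage sketch (illustrative only): submitting work and waiting on a fence. Assumes the
// vk / vk::raii aliases, a vk::raii::Device `device`, a vk::raii::Queue `queue`, and a filled
// vk::SubmitInfo `submitInfo`.
//
//   vk::raii::Fence fence = device.createFence( vk::FenceCreateInfo() );
//   queue.submit( submitInfo, *fence );
//   vk::Result waitResult = device.waitForFences( *fence, VK_TRUE, UINT64_MAX );
//   if ( waitResult == vk::Result::eTimeout ) { /* handle timeout */ }
//   device.resetFences( *fence );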
  10200. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Semaphore
  10201. Device::createSemaphore( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const & createInfo,
  10202. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10203. {
  10204. return VULKAN_HPP_RAII_NAMESPACE::Semaphore( *this, createInfo, allocator );
  10205. }
  10206. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Event
  10207. Device::createEvent( VULKAN_HPP_NAMESPACE::EventCreateInfo const & createInfo,
  10208. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10209. {
  10210. return VULKAN_HPP_RAII_NAMESPACE::Event( *this, createInfo, allocator );
  10211. }
  10212. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Event::getStatus() const
  10213. {
  10214. VULKAN_HPP_ASSERT( getDispatcher()->vkGetEventStatus && "Function <vkGetEventStatus> requires <VK_VERSION_1_0>" );
  10215. VkResult result = getDispatcher()->vkGetEventStatus( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
  10216. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10217. VULKAN_HPP_NAMESPACE_STRING "::Event::getStatus",
  10218. { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
  10219. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  10220. }
  10221. VULKAN_HPP_INLINE void Event::set() const
  10222. {
  10223. VULKAN_HPP_ASSERT( getDispatcher()->vkSetEvent && "Function <vkSetEvent> requires <VK_VERSION_1_0>" );
  10224. VkResult result = getDispatcher()->vkSetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
  10225. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::set" );
  10226. }
  10227. VULKAN_HPP_INLINE void Event::reset() const
  10228. {
  10229. VULKAN_HPP_ASSERT( getDispatcher()->vkResetEvent && "Function <vkResetEvent> requires <VK_VERSION_1_0>" );
  10230. VkResult result = getDispatcher()->vkResetEvent( static_cast<VkDevice>( m_device ), static_cast<VkEvent>( m_event ) );
  10231. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Event::reset" );
  10232. }
  10233. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::QueryPool
  10234. Device::createQueryPool( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const & createInfo,
  10235. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10236. {
  10237. return VULKAN_HPP_RAII_NAMESPACE::QueryPool( *this, createInfo, allocator );
  10238. }
  10239. template <typename DataType>
  10240. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::vector<DataType>> QueryPool::getResults(
  10241. uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
  10242. {
  10243. VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
  10244. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  10245. std::vector<DataType> data( dataSize / sizeof( DataType ) );
  10246. VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
  10247. static_cast<VkQueryPool>( m_queryPool ),
  10248. firstQuery,
  10249. queryCount,
  10250. data.size() * sizeof( DataType ),
  10251. reinterpret_cast<void *>( data.data() ),
  10252. static_cast<VkDeviceSize>( stride ),
  10253. static_cast<VkQueryResultFlags>( flags ) );
  10254. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10255. VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResults",
  10256. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  10257. return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  10258. }
  10259. template <typename DataType>
  10260. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, DataType> QueryPool::getResult(
  10261. uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const
  10262. {
  10263. VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueryPoolResults && "Function <vkGetQueryPoolResults> requires <VK_VERSION_1_0>" );
  10264. DataType data;
  10265. VkResult result = getDispatcher()->vkGetQueryPoolResults( static_cast<VkDevice>( m_device ),
  10266. static_cast<VkQueryPool>( m_queryPool ),
  10267. firstQuery,
  10268. queryCount,
  10269. sizeof( DataType ),
  10270. reinterpret_cast<void *>( &data ),
  10271. static_cast<VkDeviceSize>( stride ),
  10272. static_cast<VkQueryResultFlags>( flags ) );
  10273. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10274. VULKAN_HPP_NAMESPACE_STRING "::QueryPool::getResult",
  10275. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  10276. return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  10277. }
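// Usage sketch (illustrative only): reading back 64-bit results with wait semantics. Assumes the
// vk / vk::raii aliases and a vk::raii::QueryPool `queryPool` whose first `queryCount` queries
// have been written on the GPU.
//
//   auto [result, values] = queryPool.getResults<uint64_t>(
//     0, queryCount, queryCount * sizeof( uint64_t ), sizeof( uint64_t ),
//     vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
//   // with eWait the result is eSuccess; without it, eNotReady may be returned alongside partial data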
  10278. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Buffer
  10279. Device::createBuffer( VULKAN_HPP_NAMESPACE::BufferCreateInfo const & createInfo,
  10280. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10281. {
  10282. return VULKAN_HPP_RAII_NAMESPACE::Buffer( *this, createInfo, allocator );
  10283. }
  10284. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferView
  10285. Device::createBufferView( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const & createInfo,
  10286. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10287. {
  10288. return VULKAN_HPP_RAII_NAMESPACE::BufferView( *this, createInfo, allocator );
  10289. }
  10290. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Image
  10291. Device::createImage( VULKAN_HPP_NAMESPACE::ImageCreateInfo const & createInfo,
  10292. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10293. {
  10294. return VULKAN_HPP_RAII_NAMESPACE::Image( *this, createInfo, allocator );
  10295. }
  10296. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout
  10297. Image::getSubresourceLayout( const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource ) const VULKAN_HPP_NOEXCEPT
  10298. {
  10299. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSubresourceLayout && "Function <vkGetImageSubresourceLayout> requires <VK_VERSION_1_0>" );
  10300. VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
  10301. getDispatcher()->vkGetImageSubresourceLayout( static_cast<VkDevice>( m_device ),
  10302. static_cast<VkImage>( m_image ),
  10303. reinterpret_cast<const VkImageSubresource *>( &subresource ),
  10304. reinterpret_cast<VkSubresourceLayout *>( &layout ) );
  10305. return layout;
  10306. }
  10307. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ImageView
  10308. Device::createImageView( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const & createInfo,
  10309. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10310. {
  10311. return VULKAN_HPP_RAII_NAMESPACE::ImageView( *this, createInfo, allocator );
  10312. }
  10313. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderModule
  10314. Device::createShaderModule( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const & createInfo,
  10315. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10316. {
  10317. return VULKAN_HPP_RAII_NAMESPACE::ShaderModule( *this, createInfo, allocator );
  10318. }
  10319. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineCache
  10320. Device::createPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const & createInfo,
  10321. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10322. {
  10323. return VULKAN_HPP_RAII_NAMESPACE::PipelineCache( *this, createInfo, allocator );
  10324. }
  10325. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> PipelineCache::getData() const
  10326. {
  10327. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineCacheData && "Function <vkGetPipelineCacheData> requires <VK_VERSION_1_0>" );
  10328. std::vector<uint8_t> data;
  10329. size_t dataSize;
  10330. VkResult result;
  10331. do
  10332. {
  10333. result =
  10334. getDispatcher()->vkGetPipelineCacheData( static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, nullptr );
  10335. if ( ( result == VK_SUCCESS ) && dataSize )
  10336. {
  10337. data.resize( dataSize );
  10338. result = getDispatcher()->vkGetPipelineCacheData(
  10339. static_cast<VkDevice>( m_device ), static_cast<VkPipelineCache>( m_pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
  10340. }
  10341. } while ( result == VK_INCOMPLETE );
  10342. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::getData" );
  10343. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  10344. if ( dataSize < data.size() )
  10345. {
  10346. data.resize( dataSize );
  10347. }
  10348. return data;
  10349. }
  10350. VULKAN_HPP_INLINE void PipelineCache::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches ) const
  10351. {
  10352. VULKAN_HPP_ASSERT( getDispatcher()->vkMergePipelineCaches && "Function <vkMergePipelineCaches> requires <VK_VERSION_1_0>" );
  10353. VkResult result = getDispatcher()->vkMergePipelineCaches( static_cast<VkDevice>( m_device ),
  10354. static_cast<VkPipelineCache>( m_pipelineCache ),
  10355. srcCaches.size(),
  10356. reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
  10357. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PipelineCache::merge" );
  10358. }
  10359. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createGraphicsPipelines(
  10360. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  10361. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
  10362. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10363. {
  10364. return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
  10365. }
  10366. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createGraphicsPipeline(
  10367. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  10368. VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const & createInfo,
  10369. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10370. {
  10371. return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
  10372. }
  10373. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createComputePipelines(
  10374. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  10375. VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
  10376. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10377. {
  10378. return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
  10379. }
  10380. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline
  10381. Device::createComputePipeline( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
  10382. VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const & createInfo,
  10383. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  10384. {
  10385. return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
  10386. }
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PipelineLayout
      Device::createPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::PipelineLayout( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Sampler
      Device::createSampler( VULKAN_HPP_NAMESPACE::SamplerCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::Sampler( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout
      Device::createDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorPool
      Device::createDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::DescriptorPool( *this, createInfo, allocator );
    }

    VULKAN_HPP_INLINE void DescriptorPool::reset( VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkResetDescriptorPool && "Function <vkResetDescriptorPool> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkResetDescriptorPool(
        static_cast<VkDevice>( m_device ), static_cast<VkDescriptorPool>( m_descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DescriptorSet>
      Device::allocateDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const & allocateInfo ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::DescriptorSets( *this, allocateInfo );
    }

    VULKAN_HPP_INLINE void Device::updateDescriptorSets(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSets && "Function <vkUpdateDescriptorSets> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkUpdateDescriptorSets( static_cast<VkDevice>( m_device ),
        descriptorWrites.size(),
        reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
        descriptorCopies.size(),
        reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
    }
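
    // Usage sketch (illustrative only, not part of the generated interface): allocate one
    // descriptor set from a RAII pool and point it at a uniform buffer. `device`, `descriptorPool`
    // (assumed to be created with vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet so the RAII
    // set can free itself), `setLayout` and `uniformBuffer` are assumed to be valid RAII objects.
    //
    //   vk::DescriptorSetLayout layoutHandle = *setLayout;
    //   vk::DescriptorSetAllocateInfo allocInfo( *descriptorPool, layoutHandle );
    //   vk::raii::DescriptorSet set = std::move( device.allocateDescriptorSets( allocInfo ).front() );
    //   vk::DescriptorBufferInfo bufferInfo( *uniformBuffer, 0, VK_WHOLE_SIZE );
    //   vk::WriteDescriptorSet write( *set, 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo );
    //   device.updateDescriptorSets( write, nullptr );
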
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Framebuffer
      Device::createFramebuffer( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::Framebuffer( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
      Device::createRenderPass( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D RenderPass::getRenderAreaGranularity() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderAreaGranularity && "Function <vkGetRenderAreaGranularity> requires <VK_VERSION_1_0>" );
      VULKAN_HPP_NAMESPACE::Extent2D granularity;
      getDispatcher()->vkGetRenderAreaGranularity(
        static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
      return granularity;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CommandPool
      Device::createCommandPool( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::CommandPool( *this, createInfo, allocator );
    }

    VULKAN_HPP_INLINE void CommandPool::reset( VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandPool && "Function <vkResetCommandPool> requires <VK_VERSION_1_0>" );
      VkResult result = getDispatcher()->vkResetCommandPool(
        static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandPool::reset" );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::CommandBuffer>
      Device::allocateCommandBuffers( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const & allocateInfo ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::CommandBuffers( *this, allocateInfo );
    }

    VULKAN_HPP_INLINE void CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkBeginCommandBuffer && "Function <vkBeginCommandBuffer> requires <VK_VERSION_1_0>" );
      VkResult result = getDispatcher()->vkBeginCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
    }

    VULKAN_HPP_INLINE void CommandBuffer::end() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkEndCommandBuffer && "Function <vkEndCommandBuffer> requires <VK_VERSION_1_0>" );
      VkResult result = getDispatcher()->vkEndCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
    }

    VULKAN_HPP_INLINE void CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkResetCommandBuffer && "Function <vkResetCommandBuffer> requires <VK_VERSION_1_0>" );
      VkResult result =
        getDispatcher()->vkResetCommandBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCommandBufferResetFlags>( flags ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
    }
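
    // Usage sketch (illustrative only, not part of the generated interface): allocate one primary
    // command buffer from a RAII pool and record into it. `device` and `commandPool` are assumed
    // to be valid RAII objects; begin(), end() and reset() throw a vk::SystemError on a failed
    // VkResult instead of returning it.
    //
    //   vk::CommandBufferAllocateInfo allocInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 );
    //   vk::raii::CommandBuffer cmd = std::move( device.allocateCommandBuffers( allocInfo ).front() );
    //   cmd.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) );
    //   // ... record commands ...
    //   cmd.end();
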
    VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
      VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipeline && "Function <vkCmdBindPipeline> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBindPipeline(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setViewport( uint32_t firstViewport,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewport && "Function <vkCmdSetViewport> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetViewport(
        static_cast<VkCommandBuffer>( m_commandBuffer ), firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setScissor( uint32_t firstScissor,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissor && "Function <vkCmdSetScissor> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetScissor(
        static_cast<VkCommandBuffer>( m_commandBuffer ), firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineWidth && "Function <vkCmdSetLineWidth> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetLineWidth( static_cast<VkCommandBuffer>( m_commandBuffer ), lineWidth );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias && "Function <vkCmdSetDepthBias> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetDepthBias( static_cast<VkCommandBuffer>( m_commandBuffer ), depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetBlendConstants && "Function <vkCmdSetBlendConstants> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetBlendConstants( static_cast<VkCommandBuffer>( m_commandBuffer ), blendConstants );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBounds && "Function <vkCmdSetDepthBounds> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetDepthBounds( static_cast<VkCommandBuffer>( m_commandBuffer ), minDepthBounds, maxDepthBounds );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
      uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilCompareMask && "Function <vkCmdSetStencilCompareMask> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetStencilCompareMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilWriteMask && "Function <vkCmdSetStencilWriteMask> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetStencilWriteMask( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilReference && "Function <vkCmdSetStencilReference> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetStencilReference( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkStencilFaceFlags>( faceMask ), reference );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
      VULKAN_HPP_NAMESPACE::PipelineLayout layout,
      uint32_t firstSet,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorSets && "Function <vkCmdBindDescriptorSets> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBindDescriptorSets( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
        static_cast<VkPipelineLayout>( layout ),
        firstSet,
        descriptorSets.size(),
        reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
        dynamicOffsets.size(),
        dynamicOffsets.data() );
    }

    VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer && "Function <vkCmdBindIndexBuffer> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBindIndexBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( buffer ),
        static_cast<VkDeviceSize>( offset ),
        static_cast<VkIndexType>( indexType ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers && "Function <vkCmdBindVertexBuffers> requires <VK_VERSION_1_0>" );
      if ( buffers.size() != offsets.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
      }
      getDispatcher()->vkCmdBindVertexBuffers( static_cast<VkCommandBuffer>( m_commandBuffer ),
        firstBinding,
        buffers.size(),
        reinterpret_cast<const VkBuffer *>( buffers.data() ),
        reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
    }
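
    // Usage sketch (illustrative only, not part of the generated interface): bindVertexBuffers()
    // takes parallel buffer/offset arrays and throws a vk::LogicError when their sizes differ, so
    // pass exactly one offset per buffer. `cmd`, `vertexBuffer` and `indexBuffer` are assumed to
    // be valid RAII objects.
    //
    //   cmd.bindVertexBuffers( 0, { *vertexBuffer }, { 0 } );
    //   cmd.bindIndexBuffer( *indexBuffer, 0, vk::IndexType::eUint32 );
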
    VULKAN_HPP_INLINE void
      CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDraw && "Function <vkCmdDraw> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDraw( static_cast<VkCommandBuffer>( m_commandBuffer ), vertexCount, instanceCount, firstVertex, firstInstance );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawIndexed(
      uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexed && "Function <vkCmdDrawIndexed> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDrawIndexed( static_cast<VkCommandBuffer>( m_commandBuffer ), indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      uint32_t drawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirect && "Function <vkCmdDrawIndirect> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDrawIndirect(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      uint32_t drawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirect && "Function <vkCmdDrawIndexedIndirect> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDrawIndexedIndirect(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
    }

    VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatch && "Function <vkCmdDispatch> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDispatch( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
    }

    VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchIndirect && "Function <vkCmdDispatchIndirect> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdDispatchIndirect(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer && "Function <vkCmdCopyBuffer> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdCopyBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( srcBuffer ),
        static_cast<VkBuffer>( dstBuffer ),
        regions.size(),
        reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
      VULKAN_HPP_NAMESPACE::Image dstImage,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage && "Function <vkCmdCopyImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdCopyImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( srcImage ),
        static_cast<VkImageLayout>( srcImageLayout ),
        static_cast<VkImage>( dstImage ),
        static_cast<VkImageLayout>( dstImageLayout ),
        regions.size(),
        reinterpret_cast<const VkImageCopy *>( regions.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
      VULKAN_HPP_NAMESPACE::Image dstImage,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
      VULKAN_HPP_NAMESPACE::Filter filter ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage && "Function <vkCmdBlitImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBlitImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( srcImage ),
        static_cast<VkImageLayout>( srcImageLayout ),
        static_cast<VkImage>( dstImage ),
        static_cast<VkImageLayout>( dstImageLayout ),
        regions.size(),
        reinterpret_cast<const VkImageBlit *>( regions.data() ),
        static_cast<VkFilter>( filter ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage(
      VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
      VULKAN_HPP_NAMESPACE::Image dstImage,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage && "Function <vkCmdCopyBufferToImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdCopyBufferToImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( srcBuffer ),
        static_cast<VkImage>( dstImage ),
        static_cast<VkImageLayout>( dstImageLayout ),
        regions.size(),
        reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer(
      VULKAN_HPP_NAMESPACE::Image srcImage,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer && "Function <vkCmdCopyImageToBuffer> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdCopyImageToBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( srcImage ),
        static_cast<VkImageLayout>( srcImageLayout ),
        static_cast<VkBuffer>( dstBuffer ),
        regions.size(),
        reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
    }

    template <typename DataType>
    VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdateBuffer && "Function <vkCmdUpdateBuffer> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdUpdateBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( dstBuffer ),
        static_cast<VkDeviceSize>( dstOffset ),
        data.size() * sizeof( DataType ),
        reinterpret_cast<const void *>( data.data() ) );
    }
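
    // Usage sketch (illustrative only, not part of the generated interface): updateBuffer() hands
    // data.size() * sizeof( DataType ) bytes to vkCmdUpdateBuffer, so any contiguous container
    // works as the proxy. The core API limits vkCmdUpdateBuffer to 65536 bytes with 4-byte-aligned
    // size and offset; `cmd` and `uniformBuffer` are assumed to be valid RAII objects.
    //
    //   std::array<float, 4> tint = { 1.0f, 0.5f, 0.25f, 1.0f };
    //   cmd.updateBuffer<float>( *uniformBuffer, 0, tint );
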
    VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
      VULKAN_HPP_NAMESPACE::DeviceSize size,
      uint32_t data ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdFillBuffer && "Function <vkCmdFillBuffer> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdFillBuffer( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( dstBuffer ),
        static_cast<VkDeviceSize>( dstOffset ),
        static_cast<VkDeviceSize>( size ),
        data );
    }

    VULKAN_HPP_INLINE void CommandBuffer::clearColorImage(
      VULKAN_HPP_NAMESPACE::Image image,
      VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
      const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearColorImage && "Function <vkCmdClearColorImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdClearColorImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( image ),
        static_cast<VkImageLayout>( imageLayout ),
        reinterpret_cast<const VkClearColorValue *>( &color ),
        ranges.size(),
        reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage(
      VULKAN_HPP_NAMESPACE::Image image,
      VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
      const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearDepthStencilImage && "Function <vkCmdClearDepthStencilImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdClearDepthStencilImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( image ),
        static_cast<VkImageLayout>( imageLayout ),
        reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
        ranges.size(),
        reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdClearAttachments && "Function <vkCmdClearAttachments> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdClearAttachments( static_cast<VkCommandBuffer>( m_commandBuffer ),
        attachments.size(),
        reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
        rects.size(),
        reinterpret_cast<const VkClearRect *>( rects.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
      VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
      VULKAN_HPP_NAMESPACE::Image dstImage,
      VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage && "Function <vkCmdResolveImage> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdResolveImage( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkImage>( srcImage ),
        static_cast<VkImageLayout>( srcImageLayout ),
        static_cast<VkImage>( dstImage ),
        static_cast<VkImageLayout>( dstImageLayout ),
        regions.size(),
        reinterpret_cast<const VkImageResolve *>( regions.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
      VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent && "Function <vkCmdSetEvent> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdSetEvent(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
      VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent && "Function <vkCmdResetEvent> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdResetEvent(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::waitEvents(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
      VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
      VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents && "Function <vkCmdWaitEvents> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdWaitEvents( static_cast<VkCommandBuffer>( m_commandBuffer ),
        events.size(),
        reinterpret_cast<const VkEvent *>( events.data() ),
        static_cast<VkPipelineStageFlags>( srcStageMask ),
        static_cast<VkPipelineStageFlags>( dstStageMask ),
        memoryBarriers.size(),
        reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
        bufferMemoryBarriers.size(),
        reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
        imageMemoryBarriers.size(),
        reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier(
      VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
      VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
      VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier && "Function <vkCmdPipelineBarrier> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdPipelineBarrier( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkPipelineStageFlags>( srcStageMask ),
        static_cast<VkPipelineStageFlags>( dstStageMask ),
        static_cast<VkDependencyFlags>( dependencyFlags ),
        memoryBarriers.size(),
        reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
        bufferMemoryBarriers.size(),
        reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
        imageMemoryBarriers.size(),
        reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
    }
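
    // Usage sketch (illustrative only, not part of the generated interface): transition an image
    // into eTransferDstOptimal before a copy. The empty arguments ({} and nullptr) mean "no
    // dependency flags, no global or buffer barriers"; `cmd` and `image` are assumed to be valid
    // RAII objects.
    //
    //   vk::ImageMemoryBarrier barrier( {}, vk::AccessFlagBits::eTransferWrite,
    //                                   vk::ImageLayout::eUndefined, vk::ImageLayout::eTransferDstOptimal,
    //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, *image,
    //                                   { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
    //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
    //                        {}, nullptr, nullptr, barrier );
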
    VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
      uint32_t query,
      VULKAN_HPP_NAMESPACE::QueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQuery && "Function <vkCmdBeginQuery> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBeginQuery(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQuery && "Function <vkCmdEndQuery> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdEndQuery( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetQueryPool && "Function <vkCmdResetQueryPool> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdResetQueryPool( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
    }

    VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
      VULKAN_HPP_NAMESPACE::QueryPool queryPool,
      uint32_t query ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp && "Function <vkCmdWriteTimestamp> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdWriteTimestamp(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
    }

    VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
      uint32_t firstQuery,
      uint32_t queryCount,
      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
      VULKAN_HPP_NAMESPACE::DeviceSize stride,
      VULKAN_HPP_NAMESPACE::QueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyQueryPoolResults && "Function <vkCmdCopyQueryPoolResults> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdCopyQueryPoolResults( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkQueryPool>( queryPool ),
        firstQuery,
        queryCount,
        static_cast<VkBuffer>( dstBuffer ),
        static_cast<VkDeviceSize>( dstOffset ),
        static_cast<VkDeviceSize>( stride ),
        static_cast<VkQueryResultFlags>( flags ) );
    }

    template <typename ValuesType>
    VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
      VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
      uint32_t offset,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushConstants && "Function <vkCmdPushConstants> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdPushConstants( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkPipelineLayout>( layout ),
        static_cast<VkShaderStageFlags>( stageFlags ),
        offset,
        values.size() * sizeof( ValuesType ),
        reinterpret_cast<const void *>( values.data() ) );
    }
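
    // Usage sketch (illustrative only, not part of the generated interface): the byte count handed
    // to vkCmdPushConstants is values.size() * sizeof( ValuesType ), so four floats push 16 bytes
    // at offset 0. `cmd` and `pipelineLayout` are assumed valid, with a matching push-constant
    // range declared in the layout.
    //
    //   std::array<float, 4> constants = { 0.0f, 1.0f, 2.0f, 3.0f };
    //   cmd.pushConstants<float>( *pipelineLayout, vk::ShaderStageFlagBits::eVertex, 0, constants );
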
    VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
      VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass && "Function <vkCmdBeginRenderPass> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdBeginRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
        static_cast<VkSubpassContents>( contents ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass && "Function <vkCmdNextSubpass> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdNextSubpass( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkSubpassContents>( contents ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::endRenderPass() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass && "Function <vkCmdEndRenderPass> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdEndRenderPass( static_cast<VkCommandBuffer>( m_commandBuffer ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::executeCommands(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteCommands && "Function <vkCmdExecuteCommands> requires <VK_VERSION_1_0>" );
      getDispatcher()->vkCmdExecuteCommands(
        static_cast<VkCommandBuffer>( m_commandBuffer ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
    }

    //=== VK_VERSION_1_1 ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t Context::enumerateInstanceVersion() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkEnumerateInstanceVersion && "Function <vkEnumerateInstanceVersion> requires <VK_VERSION_1_1>" );
      uint32_t apiVersion;
      VkResult result = getDispatcher()->vkEnumerateInstanceVersion( &apiVersion );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Context::enumerateInstanceVersion" );
      return apiVersion;
    }
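
    // Usage sketch (illustrative only, not part of the generated interface): the packed uint32_t
    // returned by enumerateInstanceVersion() can be decoded with the VK_API_VERSION_* macros from
    // vulkan_core.h. `context` is assumed to be a valid vk::raii::Context.
    //
    //   uint32_t apiVersion = context.enumerateInstanceVersion();
    //   uint32_t major      = VK_API_VERSION_MAJOR( apiVersion );
    //   uint32_t minor      = VK_API_VERSION_MINOR( apiVersion );
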
    VULKAN_HPP_INLINE void
      Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2 && "Function <vkBindBufferMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
      VkResult result = getDispatcher()->vkBindBufferMemory2(
        static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
    }

    VULKAN_HPP_INLINE void Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2 && "Function <vkBindImageMemory2> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
      VkResult result = getDispatcher()->vkBindImageMemory2(
        static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
      Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures &&
        "Function <vkGetDeviceGroupPeerMemoryFeatures> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
      getDispatcher()->vkGetDeviceGroupPeerMemoryFeatures( static_cast<VkDevice>( m_device ),
        heapIndex,
        localDeviceIndex,
        remoteDeviceIndex,
        reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
      return peerMemoryFeatures;
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMask && "Function <vkCmdSetDeviceMask> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
      getDispatcher()->vkCmdSetDeviceMask( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
    }

    VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
      uint32_t baseGroupY,
      uint32_t baseGroupZ,
      uint32_t groupCountX,
      uint32_t groupCountY,
      uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBase && "Function <vkCmdDispatchBase> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
      getDispatcher()->vkCmdDispatchBase(
        static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroups() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroups &&
        "Function <vkEnumeratePhysicalDeviceGroups> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
      uint32_t physicalDeviceGroupCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
        if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
        {
          physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
          result = getDispatcher()->vkEnumeratePhysicalDeviceGroups( static_cast<VkInstance>( m_instance ),
            &physicalDeviceGroupCount,
            reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
      VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
      if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
      }
      return physicalDeviceGroupProperties;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
      Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 &&
        "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
      getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
      return memoryRequirements;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2 &&
        "Function <vkGetImageMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
      getDispatcher()->vkGetImageMemoryRequirements2( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
      return structureChain;
    }
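
    // Usage sketch (illustrative only, not part of the generated interface): the StructureChain
    // overload chains extension structs onto VkMemoryRequirements2, for example to ask whether an
    // image prefers a dedicated allocation. `device` and `image` are assumed to be valid RAII
    // objects.
    //
    //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
    //     vk::ImageMemoryRequirementsInfo2( *image ) );
    //   vk::MemoryRequirements reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;
    //   bool dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation != VK_FALSE;
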
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
      Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 &&
        "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
      getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
      return memoryRequirements;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2 &&
        "Function <vkGetBufferMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
      getDispatcher()->vkGetBufferMemoryRequirements2( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
        reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
      Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2 &&
        "Function <vkGetImageSparseMemoryRequirements2> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
      std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
      uint32_t sparseMemoryRequirementCount;
      getDispatcher()->vkGetImageSparseMemoryRequirements2(
        static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
      getDispatcher()->vkGetImageSparseMemoryRequirements2( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
        &sparseMemoryRequirementCount,
        reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
      VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
      if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
      {
        sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
      }
      return sparseMemoryRequirements;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 &&
        "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
      getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
      return features;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2 &&
        "Function <vkGetPhysicalDeviceFeatures2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
      getDispatcher()->vkGetPhysicalDeviceFeatures2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 &&
        "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
      getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
      return properties;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2 &&
        "Function <vkGetPhysicalDeviceProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
      getDispatcher()->vkGetPhysicalDeviceProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
      PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 &&
        "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
      getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
      return formatProperties;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2 &&
        "Function <vkGetPhysicalDeviceFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
      getDispatcher()->vkGetPhysicalDeviceFormatProperties2(
        static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
      PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 &&
        "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
      VkResult result =
        getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
          reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
          reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
      return imageFormatProperties;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2 &&
        "Function <vkGetPhysicalDeviceImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
      VkResult result =
        getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
          reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
          reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 &&
        "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
      uint32_t queueFamilyPropertyCount;
      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
      queueFamilyProperties.resize( queueFamilyPropertyCount );
      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        &queueFamilyPropertyCount,
        reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
      VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
      {
        queueFamilyProperties.resize( queueFamilyPropertyCount );
      }
      return queueFamilyProperties;
    }

    template <typename StructureChain>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2 &&
        "Function <vkGetPhysicalDeviceQueueFamilyProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
      std::vector<StructureChain> structureChains;
      std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
      uint32_t queueFamilyPropertyCount;
      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
      structureChains.resize( queueFamilyPropertyCount );
      queueFamilyProperties.resize( queueFamilyPropertyCount );
      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
      {
        queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
      }
      getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
        &queueFamilyPropertyCount,
        reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
      VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
      if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
      {
        structureChains.resize( queueFamilyPropertyCount );
      }
      for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
      {
        structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
      }
      return structureChains;
    }
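
    // Usage sketch (illustrative only, not part of the generated interface): the templated
    // overload returns one StructureChain per queue family, so extension structs such as
    // vk::QueueFamilyCheckpointPropertiesNV can be queried alongside the core properties.
    // `physicalDevice` is assumed to be a valid RAII object.
    //
    //   using Chain = vk::StructureChain<vk::QueueFamilyProperties2, vk::QueueFamilyCheckpointPropertiesNV>;
    //   std::vector<Chain> chains = physicalDevice.getQueueFamilyProperties2<Chain>();
    //   uint32_t queueCount = chains.front().get<vk::QueueFamilyProperties2>().queueFamilyProperties.queueCount;
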
  11212. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  11213. PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
  11214. {
  11215. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 &&
  11216. "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  11217. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  11218. getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11219. reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  11220. return memoryProperties;
  11221. }
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2() const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2 &&
    "Function <vkGetPhysicalDeviceMemoryProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
    structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
  getDispatcher()->vkGetPhysicalDeviceMemoryProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
    reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  return structureChain;
}
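// Usage sketch (illustrative comment): reading the per-heap memory budget alongside the core memory properties.
// Assumes a VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice `physicalDevice` and that <VK_EXT_memory_budget> is available.
//
//   auto chain = physicalDevice.getMemoryProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2,
//                                                    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>();
//   auto const & budget = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>();
//   // budget.heapBudget[i] and budget.heapUsage[i] correspond to memoryProperties.memoryHeaps[i]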
  11234. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
  11235. PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
  11236. {
  11237. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2 &&
  11238. "Function <vkGetPhysicalDeviceSparseImageFormatProperties2> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  11239. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
  11240. uint32_t propertyCount;
  11241. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11242. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  11243. &propertyCount,
  11244. nullptr );
  11245. properties.resize( propertyCount );
  11246. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11247. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  11248. &propertyCount,
  11249. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  11250. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  11251. if ( propertyCount < properties.size() )
  11252. {
  11253. properties.resize( propertyCount );
  11254. }
  11255. return properties;
  11256. }
VULKAN_HPP_INLINE void CommandPool::trim( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPool && "Function <vkTrimCommandPool> requires <VK_KHR_maintenance1> or <VK_VERSION_1_1>" );
  getDispatcher()->vkTrimCommandPool(
    static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
}
  11263. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Queue Device::getQueue2( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const & queueInfo ) const
  11264. {
  11265. return VULKAN_HPP_RAII_NAMESPACE::Queue( *this, queueInfo );
  11266. }
  11267. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
  11268. Device::createSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
  11269. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  11270. {
  11271. return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
  11272. }
  11273. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
  11274. Device::createDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
  11275. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  11276. {
  11277. return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
  11278. }
  11279. template <typename DataType>
  11280. VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  11281. DataType const & data ) const VULKAN_HPP_NOEXCEPT
  11282. {
  11283. VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplate &&
  11284. "Function <vkUpdateDescriptorSetWithTemplate> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
  11285. getDispatcher()->vkUpdateDescriptorSetWithTemplate( static_cast<VkDevice>( m_device ),
  11286. static_cast<VkDescriptorSet>( m_descriptorSet ),
  11287. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  11288. reinterpret_cast<const void *>( &data ) );
  11289. }
  11290. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
  11291. PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
  11292. {
  11293. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties &&
  11294. "Function <vkGetPhysicalDeviceExternalBufferProperties> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
  11295. VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
  11296. getDispatcher()->vkGetPhysicalDeviceExternalBufferProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11297. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
  11298. reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
  11299. return externalBufferProperties;
  11300. }
  11301. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
  11302. PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
  11303. {
  11304. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties &&
  11305. "Function <vkGetPhysicalDeviceExternalFenceProperties> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
  11306. VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
  11307. getDispatcher()->vkGetPhysicalDeviceExternalFenceProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11308. reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
  11309. reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
  11310. return externalFenceProperties;
  11311. }
  11312. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties(
  11313. const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
  11314. {
  11315. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties &&
  11316. "Function <vkGetPhysicalDeviceExternalSemaphoreProperties> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
  11317. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
  11318. getDispatcher()->vkGetPhysicalDeviceExternalSemaphoreProperties(
  11319. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11320. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
  11321. reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
  11322. return externalSemaphoreProperties;
  11323. }
  11324. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
  11325. Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
  11326. {
  11327. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport &&
  11328. "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
  11329. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
  11330. getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
  11331. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
  11332. reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  11333. return support;
  11334. }
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupport &&
    "Function <vkGetDescriptorSetLayoutSupport> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
  getDispatcher()->vkGetDescriptorSetLayoutSupport( static_cast<VkDevice>( m_device ),
    reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
    reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  return structureChain;
}
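// Usage sketch (illustrative comment): checking whether a descriptor set layout is supported before creating it.
// `device` is assumed to be a VULKAN_HPP_RAII_NAMESPACE::Device and `bindings` a vector of DescriptorSetLayoutBinding;
// the chained variant can additionally report the maximum variable descriptor count when descriptor indexing is used.
//
//   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo layoutCreateInfo( {}, bindings );
//   auto chain = device.getDescriptorSetLayoutSupport<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport,
//                                                     VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutCreateInfo );
//   bool supported = chain.get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>().supported;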
//=== VK_VERSION_1_2 ===
  11349. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
  11350. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  11351. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  11352. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  11353. uint32_t maxDrawCount,
  11354. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  11355. {
  11356. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCount &&
  11357. "Function <vkCmdDrawIndirectCount> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
  11358. getDispatcher()->vkCmdDrawIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11359. static_cast<VkBuffer>( buffer ),
  11360. static_cast<VkDeviceSize>( offset ),
  11361. static_cast<VkBuffer>( countBuffer ),
  11362. static_cast<VkDeviceSize>( countBufferOffset ),
  11363. maxDrawCount,
  11364. stride );
  11365. }
  11366. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
  11367. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  11368. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  11369. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  11370. uint32_t maxDrawCount,
  11371. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  11372. {
  11373. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndexedIndirectCount &&
  11374. "Function <vkCmdDrawIndexedIndirectCount> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
  11375. getDispatcher()->vkCmdDrawIndexedIndirectCount( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11376. static_cast<VkBuffer>( buffer ),
  11377. static_cast<VkDeviceSize>( offset ),
  11378. static_cast<VkBuffer>( countBuffer ),
  11379. static_cast<VkDeviceSize>( countBufferOffset ),
  11380. maxDrawCount,
  11381. stride );
  11382. }
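// Usage sketch (illustrative comment): a GPU-driven draw where the actual draw count is read by the GPU from
// `countBuffer` at execution time. `commandBuffer`, `argumentBuffer` and `countBuffer` are assumed to be created
// elsewhere, and the drawIndirectCount feature (or one of the listed extensions) must be enabled.
//
//   commandBuffer.drawIndirectCount( argumentBuffer, 0 /*offset*/,
//                                    countBuffer, 0 /*countBufferOffset*/,
//                                    maxDrawCount, sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) );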
  11383. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
  11384. Device::createRenderPass2( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
  11385. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  11386. {
  11387. return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
  11388. }
  11389. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
  11390. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
  11391. {
  11392. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2 &&
  11393. "Function <vkCmdBeginRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  11394. getDispatcher()->vkCmdBeginRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11395. reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
  11396. reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  11397. }
  11398. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  11399. const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
  11400. {
  11401. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2 && "Function <vkCmdNextSubpass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  11402. getDispatcher()->vkCmdNextSubpass2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11403. reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
  11404. reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  11405. }
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2 && "Function <vkCmdEndRenderPass2> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  getDispatcher()->vkCmdEndRenderPass2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
}
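// Usage sketch (illustrative comment): recording a render pass with the *2 entry points. `commandBuffer` and
// `renderPassBeginInfo` are assumed to be set up elsewhere; the SubpassBeginInfo / SubpassEndInfo structs exist so
// that extensions can be chained where the original vkCmdBeginRenderPass parameters could not.
//
//   commandBuffer.beginRenderPass2( renderPassBeginInfo, VULKAN_HPP_NAMESPACE::SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents::eInline ) );
//   // ... record draws for subpass 0, then for each additional subpass:
//   commandBuffer.nextSubpass2( VULKAN_HPP_NAMESPACE::SubpassBeginInfo( VULKAN_HPP_NAMESPACE::SubpassContents::eInline ),
//                               VULKAN_HPP_NAMESPACE::SubpassEndInfo() );
//   commandBuffer.endRenderPass2( VULKAN_HPP_NAMESPACE::SubpassEndInfo() );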
VULKAN_HPP_INLINE void QueryPool::reset( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPool && "Function <vkResetQueryPool> requires <VK_EXT_host_query_reset> or <VK_VERSION_1_2>" );
  getDispatcher()->vkResetQueryPool( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValue() const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValue &&
    "Function <vkGetSemaphoreCounterValue> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
  uint64_t value;
  VkResult result = getDispatcher()->vkGetSemaphoreCounterValue( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValue" );
  return value;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
                                                                                            uint64_t timeout ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphores && "Function <vkWaitSemaphores> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
  VkResult result =
    getDispatcher()->vkWaitSemaphores( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
    { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_INLINE void Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphore && "Function <vkSignalSemaphore> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
  VkResult result = getDispatcher()->vkSignalSemaphore( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
}
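// Usage sketch (illustrative comment): host-side synchronization with a timeline semaphore. `device` and the
// timeline semaphore `timeline` (created with a chained SemaphoreTypeCreateInfo of type eTimeline) are assumed to
// exist; the timeout is in nanoseconds, and waitSemaphores() returns eTimeout instead of throwing when it expires.
//
//   VULKAN_HPP_NAMESPACE::Semaphore sem   = *timeline;
//   uint64_t                        value = 42;
//   VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo waitInfo( {}, sem, value );
//   if ( device.waitSemaphores( waitInfo, 1'000'000'000 ) == VULKAN_HPP_NAMESPACE::Result::eTimeout ) { /* not signaled yet */ }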
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferDeviceAddress &&
    "Function <vkGetBufferDeviceAddress> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
  VkDeviceAddress result =
    getDispatcher()->vkGetBufferDeviceAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
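// Usage sketch (illustrative comment): retrieving a GPU address for a buffer created with the eShaderDeviceAddress
// usage flag while the bufferDeviceAddress feature is enabled. `device` and the RAII `buffer` are assumed to exist;
// the returned address can then be handed to shaders, e.g. via a push constant.
//
//   VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo addressInfo( *buffer );
//   VULKAN_HPP_NAMESPACE::DeviceAddress           address = device.getBufferAddress( addressInfo );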
  11451. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
  11452. Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
  11453. {
  11454. VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddress &&
  11455. "Function <vkGetBufferOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
  11456. uint64_t result =
  11457. getDispatcher()->vkGetBufferOpaqueCaptureAddress( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  11458. return result;
  11459. }
  11460. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
  11461. Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
  11462. {
  11463. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress &&
  11464. "Function <vkGetDeviceMemoryOpaqueCaptureAddress> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
  11465. uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddress( static_cast<VkDevice>( m_device ),
  11466. reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  11467. return result;
  11468. }
//=== VK_VERSION_1_3 ===
  11470. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolProperties() const
  11471. {
  11472. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolProperties &&
  11473. "Function <vkGetPhysicalDeviceToolProperties> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
  11474. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
  11475. uint32_t toolCount;
  11476. VkResult result;
  11477. do
  11478. {
  11479. result = getDispatcher()->vkGetPhysicalDeviceToolProperties( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
  11480. if ( ( result == VK_SUCCESS ) && toolCount )
  11481. {
  11482. toolProperties.resize( toolCount );
  11483. result = getDispatcher()->vkGetPhysicalDeviceToolProperties(
  11484. static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
  11485. }
  11486. } while ( result == VK_INCOMPLETE );
  11487. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
  11488. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  11489. if ( toolCount < toolProperties.size() )
  11490. {
  11491. toolProperties.resize( toolCount );
  11492. }
  11493. return toolProperties;
  11494. }
  11495. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
  11496. Device::createPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
  11497. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  11498. {
  11499. return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
  11500. }
// A private-data slot stores one uint64_t per ( objectType, objectHandle ) pair; see <VK_EXT_private_data> / Vulkan 1.3.
VULKAN_HPP_INLINE void Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  uint64_t objectHandle,
  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  uint64_t data ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateData && "Function <vkSetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
  VkResult result = getDispatcher()->vkSetPrivateData(
    static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
}
// Reads back the value stored for ( objectType_, objectHandle ); zero is returned if nothing has been set for that object.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  uint64_t objectHandle,
  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateData && "Function <vkGetPrivateData> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
  uint64_t data;
  getDispatcher()->vkGetPrivateData(
    static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
  return data;
}
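// Usage sketch (illustrative comment): attaching an application-defined value to a Vulkan object. `device` and
// `image` are assumed to be RAII objects created elsewhere; the object handle is passed as the raw uint64_t value
// that the private-data entry points expect.
//
//   VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot slot = device.createPrivateDataSlot( {} );
//   uint64_t handle = uint64_t( static_cast<VkImage>( *image ) );
//   device.setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType::eImage, handle, *slot, 0xdeadbeef );
//   uint64_t tag = device.getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType::eImage, handle, *slot );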
  11521. VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
  11522. const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
  11523. {
  11524. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2 && "Function <vkCmdSetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  11525. getDispatcher()->vkCmdSetEvent2(
  11526. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  11527. }
  11528. VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
  11529. VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
  11530. {
  11531. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2 && "Function <vkCmdResetEvent2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  11532. getDispatcher()->vkCmdResetEvent2(
  11533. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  11534. }
  11535. VULKAN_HPP_INLINE void
  11536. CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
  11537. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
  11538. {
  11539. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2 && "Function <vkCmdWaitEvents2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  11540. if ( events.size() != dependencyInfos.size() )
  11541. {
  11542. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
  11543. }
  11544. getDispatcher()->vkCmdWaitEvents2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11545. events.size(),
  11546. reinterpret_cast<const VkEvent *>( events.data() ),
  11547. reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  11548. }
  11549. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
  11550. {
  11551. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2 && "Function <vkCmdPipelineBarrier2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  11552. getDispatcher()->vkCmdPipelineBarrier2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  11553. }
  11554. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  11555. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  11556. uint32_t query ) const VULKAN_HPP_NOEXCEPT
  11557. {
  11558. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2 && "Function <vkCmdWriteTimestamp2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  11559. getDispatcher()->vkCmdWriteTimestamp2(
  11560. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  11561. }
VULKAN_HPP_INLINE void Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
  VULKAN_HPP_NAMESPACE::Fence fence ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2 && "Function <vkQueueSubmit2> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  VkResult result = getDispatcher()->vkQueueSubmit2(
    static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
}
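// Usage sketch (illustrative comment): submitting one command buffer through the synchronization2 path. `queue`,
// `commandBuffer` and `fence` are assumed to be RAII objects created elsewhere; wait/signal SemaphoreSubmitInfo
// entries would be added to the SubmitInfo2 in the same way as the command-buffer info.
//
//   VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo cbInfo( *commandBuffer );
//   VULKAN_HPP_NAMESPACE::SubmitInfo2             submitInfo( {}, {}, cbInfo );
//   queue.submit2( submitInfo, *fence );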
  11570. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
  11571. {
  11572. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2 && "Function <vkCmdCopyBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11573. getDispatcher()->vkCmdCopyBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  11574. }
  11575. VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
  11576. {
  11577. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2 && "Function <vkCmdCopyImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11578. getDispatcher()->vkCmdCopyImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  11579. }
  11580. VULKAN_HPP_INLINE void
  11581. CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
  11582. {
  11583. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2 &&
  11584. "Function <vkCmdCopyBufferToImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11585. getDispatcher()->vkCmdCopyBufferToImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11586. reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  11587. }
  11588. VULKAN_HPP_INLINE void
  11589. CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
  11590. {
  11591. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2 &&
  11592. "Function <vkCmdCopyImageToBuffer2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11593. getDispatcher()->vkCmdCopyImageToBuffer2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11594. reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  11595. }
  11596. VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
  11597. {
  11598. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2 && "Function <vkCmdBlitImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11599. getDispatcher()->vkCmdBlitImage2( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  11600. }
  11601. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
  11602. {
  11603. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2 && "Function <vkCmdResolveImage2> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  11604. getDispatcher()->vkCmdResolveImage2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11605. reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  11606. }
VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRendering && "Function <vkCmdBeginRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
  getDispatcher()->vkCmdBeginRendering( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endRendering() const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRendering && "Function <vkCmdEndRendering> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
  getDispatcher()->vkCmdEndRendering( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
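// Usage sketch (illustrative comment): rendering without a render pass object (dynamic rendering). `commandBuffer`,
// `colorImageView` and `renderArea` are assumed to be set up elsewhere, and the image must already be in
// eColorAttachmentOptimal layout when recording begins.
//
//   VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo colorAttachment( colorImageView, VULKAN_HPP_NAMESPACE::ImageLayout::eColorAttachmentOptimal );
//   colorAttachment.setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eClear ).setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore );
//   VULKAN_HPP_NAMESPACE::RenderingInfo renderingInfo( {}, renderArea, 1 /*layerCount*/, 0 /*viewMask*/, colorAttachment );
//   commandBuffer.beginRendering( renderingInfo );
//   // ... bind pipeline and record draws ...
//   commandBuffer.endRendering();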
  11617. VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
  11618. {
  11619. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullMode &&
  11620. "Function <vkCmdSetCullMode> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11621. getDispatcher()->vkCmdSetCullMode( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
  11622. }
  11623. VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
  11624. {
  11625. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFace &&
  11626. "Function <vkCmdSetFrontFace> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11627. getDispatcher()->vkCmdSetFrontFace( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
  11628. }
  11629. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
  11630. {
  11631. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopology &&
  11632. "Function <vkCmdSetPrimitiveTopology> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11633. getDispatcher()->vkCmdSetPrimitiveTopology( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  11634. }
  11635. VULKAN_HPP_INLINE void
  11636. CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
  11637. {
  11638. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCount &&
  11639. "Function <vkCmdSetViewportWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11640. getDispatcher()->vkCmdSetViewportWithCount(
  11641. static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  11642. }
  11643. VULKAN_HPP_INLINE void
  11644. CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
  11645. {
  11646. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCount &&
  11647. "Function <vkCmdSetScissorWithCount> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11648. getDispatcher()->vkCmdSetScissorWithCount(
  11649. static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  11650. }
  11651. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
  11652. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
  11653. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
  11654. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
  11655. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
  11656. {
  11657. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2 &&
  11658. "Function <vkCmdBindVertexBuffers2> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11659. if ( buffers.size() != offsets.size() )
  11660. {
  11661. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
  11662. }
  11663. if ( !sizes.empty() && buffers.size() != sizes.size() )
  11664. {
  11665. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
  11666. }
  11667. if ( !strides.empty() && buffers.size() != strides.size() )
  11668. {
  11669. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
  11670. }
  11671. getDispatcher()->vkCmdBindVertexBuffers2( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11672. firstBinding,
  11673. buffers.size(),
  11674. reinterpret_cast<const VkBuffer *>( buffers.data() ),
  11675. reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
  11676. reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
  11677. reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  11678. }
  11679. VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
  11680. {
  11681. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnable &&
  11682. "Function <vkCmdSetDepthTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11683. getDispatcher()->vkCmdSetDepthTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
  11684. }
  11685. VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
  11686. {
  11687. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnable &&
  11688. "Function <vkCmdSetDepthWriteEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11689. getDispatcher()->vkCmdSetDepthWriteEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
  11690. }
  11691. VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
  11692. {
  11693. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOp &&
  11694. "Function <vkCmdSetDepthCompareOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11695. getDispatcher()->vkCmdSetDepthCompareOp( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
  11696. }
  11697. VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
  11698. {
  11699. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnable &&
  11700. "Function <vkCmdSetDepthBoundsTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11701. getDispatcher()->vkCmdSetDepthBoundsTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
  11702. }
  11703. VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
  11704. {
  11705. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnable &&
  11706. "Function <vkCmdSetStencilTestEnable> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11707. getDispatcher()->vkCmdSetStencilTestEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
  11708. }
  11709. VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
  11710. VULKAN_HPP_NAMESPACE::StencilOp failOp,
  11711. VULKAN_HPP_NAMESPACE::StencilOp passOp,
  11712. VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
  11713. VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
  11714. {
  11715. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOp &&
  11716. "Function <vkCmdSetStencilOp> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11717. getDispatcher()->vkCmdSetStencilOp( static_cast<VkCommandBuffer>( m_commandBuffer ),
  11718. static_cast<VkStencilFaceFlags>( faceMask ),
  11719. static_cast<VkStencilOp>( failOp ),
  11720. static_cast<VkStencilOp>( passOp ),
  11721. static_cast<VkStencilOp>( depthFailOp ),
  11722. static_cast<VkCompareOp>( compareOp ) );
  11723. }
  11724. VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
  11725. {
  11726. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizerDiscardEnable &&
  11727. "Function <vkCmdSetRasterizerDiscardEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11728. getDispatcher()->vkCmdSetRasterizerDiscardEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
  11729. }
  11730. VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
  11731. {
  11732. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnable &&
  11733. "Function <vkCmdSetDepthBiasEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11734. getDispatcher()->vkCmdSetDepthBiasEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
  11735. }
  11736. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
  11737. {
  11738. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveRestartEnable &&
  11739. "Function <vkCmdSetPrimitiveRestartEnable> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  11740. getDispatcher()->vkCmdSetPrimitiveRestartEnable( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
  11741. }
  11742. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  11743. Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  11744. {
  11745. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements &&
  11746. "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  11747. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  11748. getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
  11749. reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
  11750. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11751. return memoryRequirements;
  11752. }
  11753. template <typename X, typename Y, typename... Z>
  11754. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  11755. Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  11756. {
  11757. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirements &&
  11758. "Function <vkGetDeviceBufferMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  11759. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  11760. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  11761. getDispatcher()->vkGetDeviceBufferMemoryRequirements( static_cast<VkDevice>( m_device ),
  11762. reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
  11763. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11764. return structureChain;
  11765. }
  11766. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  11767. Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  11768. {
  11769. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements &&
  11770. "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  11771. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  11772. getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
  11773. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  11774. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11775. return memoryRequirements;
  11776. }
  11777. template <typename X, typename Y, typename... Z>
  11778. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  11779. Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  11780. {
  11781. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirements &&
  11782. "Function <vkGetDeviceImageMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  11783. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  11784. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  11785. getDispatcher()->vkGetDeviceImageMemoryRequirements( static_cast<VkDevice>( m_device ),
  11786. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  11787. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11788. return structureChain;
  11789. }
  11790. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
  11791. Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
  11792. {
  11793. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirements &&
  11794. "Function <vkGetDeviceImageSparseMemoryRequirements> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  11795. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
  11796. uint32_t sparseMemoryRequirementCount;
  11797. getDispatcher()->vkGetDeviceImageSparseMemoryRequirements(
  11798. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  11799. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  11800. getDispatcher()->vkGetDeviceImageSparseMemoryRequirements( static_cast<VkDevice>( m_device ),
  11801. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  11802. &sparseMemoryRequirementCount,
  11803. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  11804. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  11805. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  11806. {
  11807. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  11808. }
  11809. return sparseMemoryRequirements;
  11810. }
//=== VK_KHR_surface ===
  11812. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
  11813. VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11814. {
  11815. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR && "Function <vkGetPhysicalDeviceSurfaceSupportKHR> requires <VK_KHR_surface>" );
  11816. VULKAN_HPP_NAMESPACE::Bool32 supported;
  11817. VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceSupportKHR(
  11818. static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
  11819. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
  11820. return supported;
  11821. }
  11822. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR
  11823. PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11824. {
  11825. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR &&
  11826. "Function <vkGetPhysicalDeviceSurfaceCapabilitiesKHR> requires <VK_KHR_surface>" );
  11827. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
  11828. VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11829. static_cast<VkSurfaceKHR>( surface ),
  11830. reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
  11831. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
  11832. return surfaceCapabilities;
  11833. }
  11834. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>
  11835. PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11836. {
  11837. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR && "Function <vkGetPhysicalDeviceSurfaceFormatsKHR> requires <VK_KHR_surface>" );
  11838. std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR> surfaceFormats;
  11839. uint32_t surfaceFormatCount;
  11840. VkResult result;
  11841. do
  11842. {
  11843. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR(
  11844. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
  11845. if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
  11846. {
  11847. surfaceFormats.resize( surfaceFormatCount );
  11848. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormatsKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11849. static_cast<VkSurfaceKHR>( surface ),
  11850. &surfaceFormatCount,
  11851. reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
  11852. }
  11853. } while ( result == VK_INCOMPLETE );
  11854. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  11855. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  11856. if ( surfaceFormatCount < surfaceFormats.size() )
  11857. {
  11858. surfaceFormats.resize( surfaceFormatCount );
  11859. }
  11860. return surfaceFormats;
  11861. }
  11862. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
  11863. PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11864. {
  11865. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR &&
  11866. "Function <vkGetPhysicalDeviceSurfacePresentModesKHR> requires <VK_KHR_surface>" );
  11867. std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
  11868. uint32_t presentModeCount;
  11869. VkResult result;
  11870. do
  11871. {
  11872. result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR(
  11873. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
  11874. if ( ( result == VK_SUCCESS ) && presentModeCount )
  11875. {
  11876. presentModes.resize( presentModeCount );
  11877. result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  11878. static_cast<VkSurfaceKHR>( surface ),
  11879. &presentModeCount,
  11880. reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
  11881. }
  11882. } while ( result == VK_INCOMPLETE );
  11883. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  11884. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  11885. if ( presentModeCount < presentModes.size() )
  11886. {
  11887. presentModes.resize( presentModeCount );
  11888. }
  11889. return presentModes;
  11890. }
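// Usage sketch (illustrative comment): the three surface queries that typically feed swapchain creation. Assumes a
// VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice `physicalDevice` and a VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR `surface`;
// picking a format and present mode from the returned lists is application specific.
//
//   auto capabilities = physicalDevice.getSurfaceCapabilitiesKHR( *surface );
//   auto formats      = physicalDevice.getSurfaceFormatsKHR( *surface );
//   auto presentModes = physicalDevice.getSurfacePresentModesKHR( *surface );
//   uint32_t imageCount = capabilities.minImageCount + 1;  // clamp to maxImageCount when that is non-zero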
//=== VK_KHR_swapchain ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
Device::createSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
  VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
  return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
}
  11898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Image> SwapchainKHR::getImages() const
  11899. {
  11900. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainImagesKHR && "Function <vkGetSwapchainImagesKHR> requires <VK_KHR_swapchain>" );
  11901. std::vector<VULKAN_HPP_NAMESPACE::Image> swapchainImages;
  11902. uint32_t swapchainImageCount;
  11903. VkResult result;
  11904. do
  11905. {
  11906. result = getDispatcher()->vkGetSwapchainImagesKHR(
  11907. static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &swapchainImageCount, nullptr );
  11908. if ( ( result == VK_SUCCESS ) && swapchainImageCount )
  11909. {
  11910. swapchainImages.resize( swapchainImageCount );
  11911. result = getDispatcher()->vkGetSwapchainImagesKHR( static_cast<VkDevice>( m_device ),
  11912. static_cast<VkSwapchainKHR>( m_swapchain ),
  11913. &swapchainImageCount,
  11914. reinterpret_cast<VkImage *>( swapchainImages.data() ) );
  11915. }
  11916. } while ( result == VK_INCOMPLETE );
  11917. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getImages" );
  11918. VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
  11919. if ( swapchainImageCount < swapchainImages.size() )
  11920. {
  11921. swapchainImages.resize( swapchainImageCount );
  11922. }
  11923. return swapchainImages;
  11924. }
// Note: eTimeout, eNotReady and eSuboptimalKHR are treated as success codes here, so callers must inspect the
// returned Result; only genuine errors (e.g. VK_ERROR_OUT_OF_DATE_KHR) throw.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
SwapchainKHR::acquireNextImage( uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImageKHR && "Function <vkAcquireNextImageKHR> requires <VK_KHR_swapchain>" );
  uint32_t imageIndex;
  VkResult result = getDispatcher()->vkAcquireNextImageKHR( static_cast<VkDevice>( m_device ),
    static_cast<VkSwapchainKHR>( m_swapchain ),
    timeout,
    static_cast<VkSemaphore>( semaphore ),
    static_cast<VkFence>( fence ),
    &imageIndex );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireNextImage",
    { VULKAN_HPP_NAMESPACE::Result::eSuccess,
      VULKAN_HPP_NAMESPACE::Result::eTimeout,
      VULKAN_HPP_NAMESPACE::Result::eNotReady,
      VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
// presentKHR likewise returns eSuboptimalKHR instead of throwing, so the caller can decide when to recreate the swapchain.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo ) const
{
  VULKAN_HPP_ASSERT( getDispatcher()->vkQueuePresentKHR && "Function <vkQueuePresentKHR> requires <VK_KHR_swapchain>" );
  VkResult result = getDispatcher()->vkQueuePresentKHR( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
    { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
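// Usage sketch (illustrative comment): the per-frame acquire/present pattern. `swapchain`, `queue`, `presentInfo`
// and `imageAvailable` are assumed to be set up elsewhere. Because VK_ERROR_OUT_OF_DATE_KHR is not in the accepted
// result lists above, it surfaces as a VULKAN_HPP_NAMESPACE::OutOfDateKHRError exception and, together with
// eSuboptimalKHR, is the usual trigger for swapchain recreation.
//
//   auto [acquireResult, imageIndex] = swapchain.acquireNextImage( UINT64_MAX, *imageAvailable );
//   // ... record and submit work targeting swapchainImages[imageIndex] ...
//   if ( queue.presentKHR( presentInfo ) == VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR ) { /* recreate the swapchain */ }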
  11953. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR Device::getGroupPresentCapabilitiesKHR() const
  11954. {
  11955. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR &&
  11956. "Function <vkGetDeviceGroupPresentCapabilitiesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
  11957. VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
  11958. VkResult result = getDispatcher()->vkGetDeviceGroupPresentCapabilitiesKHR(
  11959. static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
  11960. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  11961. return deviceGroupPresentCapabilities;
  11962. }
  11963. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
  11964. Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11965. {
  11966. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR &&
  11967. "Function <vkGetDeviceGroupSurfacePresentModesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
  11968. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  11969. VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModesKHR(
  11970. static_cast<VkDevice>( m_device ), static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
  11971. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
  11972. return modes;
  11973. }
  11974. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::Rect2D>
  11975. PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  11976. {
  11977. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR &&
  11978. "Function <vkGetPhysicalDevicePresentRectanglesKHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
  11979. std::vector<VULKAN_HPP_NAMESPACE::Rect2D> rects;
  11980. uint32_t rectCount;
  11981. VkResult result;
  11982. do
  11983. {
  11984. result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR(
  11985. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
  11986. if ( ( result == VK_SUCCESS ) && rectCount )
  11987. {
  11988. rects.resize( rectCount );
  11989. result = getDispatcher()->vkGetPhysicalDevicePresentRectanglesKHR(
  11990. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
  11991. }
  11992. } while ( result == VK_INCOMPLETE );
  11993. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  11994. VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  11995. if ( rectCount < rects.size() )
  11996. {
  11997. rects.resize( rectCount );
  11998. }
  11999. return rects;
  12000. }
  12001. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, uint32_t>
  12002. Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo ) const
  12003. {
  12004. VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireNextImage2KHR && "Function <vkAcquireNextImage2KHR> requires <VK_KHR_device_group> or <VK_KHR_swapchain>" );
  12005. uint32_t imageIndex;
  12006. VkResult result = getDispatcher()->vkAcquireNextImage2KHR(
  12007. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
  12008. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12009. VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
  12010. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  12011. VULKAN_HPP_NAMESPACE::Result::eTimeout,
  12012. VULKAN_HPP_NAMESPACE::Result::eNotReady,
  12013. VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  12014. return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
  12015. }
  12016. //=== VK_KHR_display ===
  12017. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> PhysicalDevice::getDisplayPropertiesKHR() const
  12018. {
  12019. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR &&
  12020. "Function <vkGetPhysicalDeviceDisplayPropertiesKHR> requires <VK_KHR_display>" );
  12021. std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR> properties;
  12022. uint32_t propertyCount;
  12023. VkResult result;
  12024. do
  12025. {
  12026. result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
  12027. if ( ( result == VK_SUCCESS ) && propertyCount )
  12028. {
  12029. properties.resize( propertyCount );
  12030. result = getDispatcher()->vkGetPhysicalDeviceDisplayPropertiesKHR(
  12031. static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
  12032. }
  12033. } while ( result == VK_INCOMPLETE );
  12034. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  12035. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  12036. if ( propertyCount < properties.size() )
  12037. {
  12038. properties.resize( propertyCount );
  12039. }
  12040. return properties;
  12041. }
  12042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> PhysicalDevice::getDisplayPlanePropertiesKHR() const
  12043. {
  12044. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR &&
  12045. "Function <vkGetPhysicalDeviceDisplayPlanePropertiesKHR> requires <VK_KHR_display>" );
  12046. std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR> properties;
  12047. uint32_t propertyCount;
  12048. VkResult result;
  12049. do
  12050. {
  12051. result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
  12052. if ( ( result == VK_SUCCESS ) && propertyCount )
  12053. {
  12054. properties.resize( propertyCount );
  12055. result = getDispatcher()->vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
  12056. static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
  12057. }
  12058. } while ( result == VK_INCOMPLETE );
  12059. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  12060. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  12061. if ( propertyCount < properties.size() )
  12062. {
  12063. properties.resize( propertyCount );
  12064. }
  12065. return properties;
  12066. }
  12067. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::DisplayKHR>
  12068. PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex ) const
  12069. {
  12070. return VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs( *this, planeIndex );
  12071. }
  12072. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> DisplayKHR::getModeProperties() const
  12073. {
  12074. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModePropertiesKHR && "Function <vkGetDisplayModePropertiesKHR> requires <VK_KHR_display>" );
  12075. std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR> properties;
  12076. uint32_t propertyCount;
  12077. VkResult result;
  12078. do
  12079. {
  12080. result = getDispatcher()->vkGetDisplayModePropertiesKHR(
  12081. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
  12082. if ( ( result == VK_SUCCESS ) && propertyCount )
  12083. {
  12084. properties.resize( propertyCount );
  12085. result = getDispatcher()->vkGetDisplayModePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12086. static_cast<VkDisplayKHR>( m_display ),
  12087. &propertyCount,
  12088. reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
  12089. }
  12090. } while ( result == VK_INCOMPLETE );
  12091. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties" );
  12092. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  12093. if ( propertyCount < properties.size() )
  12094. {
  12095. properties.resize( propertyCount );
  12096. }
  12097. return properties;
  12098. }
  12099. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR
  12100. DisplayKHR::createMode( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const & createInfo,
  12101. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12102. {
  12103. return VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR( *this, createInfo, allocator );
  12104. }
  12105. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR
  12106. DisplayModeKHR::getDisplayPlaneCapabilities( uint32_t planeIndex ) const
  12107. {
  12108. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR && "Function <vkGetDisplayPlaneCapabilitiesKHR> requires <VK_KHR_display>" );
  12109. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
  12110. VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12111. static_cast<VkDisplayModeKHR>( m_displayModeKHR ),
  12112. planeIndex,
  12113. reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
  12114. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayModeKHR::getDisplayPlaneCapabilities" );
  12115. return capabilities;
  12116. }
  12117. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12118. Instance::createDisplayPlaneSurfaceKHR( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const & createInfo,
  12119. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12120. {
  12121. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12122. }
  12123. //=== VK_KHR_display_swapchain ===
  12124. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR>
  12125. Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
  12126. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12127. {
  12128. return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs( *this, createInfos, allocator );
  12129. }
  12130. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR
  12131. Device::createSharedSwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const & createInfo,
  12132. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12133. {
  12134. return VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR( *this, createInfo, allocator );
  12135. }
  12136. # if defined( VK_USE_PLATFORM_XLIB_KHR )
  12137. //=== VK_KHR_xlib_surface ===
  12138. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12139. Instance::createXlibSurfaceKHR( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const & createInfo,
  12140. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12141. {
  12142. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12143. }
  12144. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  12145. PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
  12146. {
  12147. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR &&
  12148. "Function <vkGetPhysicalDeviceXlibPresentationSupportKHR> requires <VK_KHR_xlib_surface>" );
  12149. VkBool32 result =
  12150. getDispatcher()->vkGetPhysicalDeviceXlibPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dpy, visualID );
  12151. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  12152. }
  12153. # endif /*VK_USE_PLATFORM_XLIB_KHR*/
  12154. # if defined( VK_USE_PLATFORM_XCB_KHR )
  12155. //=== VK_KHR_xcb_surface ===
  12156. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12157. Instance::createXcbSurfaceKHR( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const & createInfo,
  12158. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12159. {
  12160. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12161. }
  12162. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR(
  12163. uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
  12164. {
  12165. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR &&
  12166. "Function <vkGetPhysicalDeviceXcbPresentationSupportKHR> requires <VK_KHR_xcb_surface>" );
  12167. VkBool32 result = getDispatcher()->vkGetPhysicalDeviceXcbPresentationSupportKHR(
  12168. static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &connection, visual_id );
  12169. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  12170. }
  12171. # endif /*VK_USE_PLATFORM_XCB_KHR*/
  12172. # if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  12173. //=== VK_KHR_wayland_surface ===
  12174. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12175. Instance::createWaylandSurfaceKHR( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const & createInfo,
  12176. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12177. {
  12178. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12179. }
  12180. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  12181. PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display ) const VULKAN_HPP_NOEXCEPT
  12182. {
  12183. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR &&
  12184. "Function <vkGetPhysicalDeviceWaylandPresentationSupportKHR> requires <VK_KHR_wayland_surface>" );
  12185. VkBool32 result =
  12186. getDispatcher()->vkGetPhysicalDeviceWaylandPresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &display );
  12187. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  12188. }
  12189. # endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  12190. # if defined( VK_USE_PLATFORM_ANDROID_KHR )
  12191. //=== VK_KHR_android_surface ===
  12192. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12193. Instance::createAndroidSurfaceKHR( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const & createInfo,
  12194. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12195. {
  12196. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12197. }
  12198. # endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  12199. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  12200. //=== VK_KHR_win32_surface ===
  12201. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12202. Instance::createWin32SurfaceKHR( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const & createInfo,
  12203. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12204. {
  12205. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12206. }
  12207. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  12208. PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
  12209. {
  12210. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR &&
  12211. "Function <vkGetPhysicalDeviceWin32PresentationSupportKHR> requires <VK_KHR_win32_surface>" );
  12212. VkBool32 result = getDispatcher()->vkGetPhysicalDeviceWin32PresentationSupportKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex );
  12213. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  12214. }
  12215. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12216. //=== VK_EXT_debug_report ===
  12217. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT
  12218. Instance::createDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const & createInfo,
  12219. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12220. {
  12221. return VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT( *this, createInfo, allocator );
  12222. }
  12223. VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
  12224. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
  12225. uint64_t object,
  12226. size_t location,
  12227. int32_t messageCode,
  12228. const std::string & layerPrefix,
  12229. const std::string & message ) const VULKAN_HPP_NOEXCEPT
  12230. {
  12231. VULKAN_HPP_ASSERT( getDispatcher()->vkDebugReportMessageEXT && "Function <vkDebugReportMessageEXT> requires <VK_EXT_debug_report>" );
  12232. getDispatcher()->vkDebugReportMessageEXT( static_cast<VkInstance>( m_instance ),
  12233. static_cast<VkDebugReportFlagsEXT>( flags ),
  12234. static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
  12235. object,
  12236. location,
  12237. messageCode,
  12238. layerPrefix.c_str(),
  12239. message.c_str() );
  12240. }
  12241. //=== VK_EXT_debug_marker ===
  12242. VULKAN_HPP_INLINE void Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo ) const
  12243. {
  12244. VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectTagEXT && "Function <vkDebugMarkerSetObjectTagEXT> requires <VK_EXT_debug_marker>" );
  12245. VkResult result =
  12246. getDispatcher()->vkDebugMarkerSetObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
  12247. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
  12248. }
  12249. VULKAN_HPP_INLINE void Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo ) const
  12250. {
  12251. VULKAN_HPP_ASSERT( getDispatcher()->vkDebugMarkerSetObjectNameEXT && "Function <vkDebugMarkerSetObjectNameEXT> requires <VK_EXT_debug_marker>" );
  12252. VkResult result = getDispatcher()->vkDebugMarkerSetObjectNameEXT( static_cast<VkDevice>( m_device ),
  12253. reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
  12254. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
  12255. }
  12256. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
  12257. {
  12258. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerBeginEXT && "Function <vkCmdDebugMarkerBeginEXT> requires <VK_EXT_debug_marker>" );
  12259. getDispatcher()->vkCmdDebugMarkerBeginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12260. reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  12261. }
  12262. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT() const VULKAN_HPP_NOEXCEPT
  12263. {
  12264. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerEndEXT && "Function <vkCmdDebugMarkerEndEXT> requires <VK_EXT_debug_marker>" );
  12265. getDispatcher()->vkCmdDebugMarkerEndEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
  12266. }
  12267. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo ) const VULKAN_HPP_NOEXCEPT
  12268. {
  12269. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDebugMarkerInsertEXT && "Function <vkCmdDebugMarkerInsertEXT> requires <VK_EXT_debug_marker>" );
  12270. getDispatcher()->vkCmdDebugMarkerInsertEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12271. reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  12272. }
  12273. //=== VK_KHR_video_queue ===
  12274. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR
  12275. PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
  12276. {
  12277. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
  12278. "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
  12279. VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
  12280. VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12281. reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
  12282. reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
  12283. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  12284. return capabilities;
  12285. }
  12286. template <typename X, typename Y, typename... Z>
  12287. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  12288. PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile ) const
  12289. {
  12290. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR &&
  12291. "Function <vkGetPhysicalDeviceVideoCapabilitiesKHR> requires <VK_KHR_video_queue>" );
  12292. StructureChain<X, Y, Z...> structureChain;
  12293. VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
  12294. VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoCapabilitiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12295. reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ),
  12296. reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
  12297. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  12298. return structureChain;
  12299. }
  12300. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>
  12301. PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo ) const
  12302. {
  12303. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR &&
  12304. "Function <vkGetPhysicalDeviceVideoFormatPropertiesKHR> requires <VK_KHR_video_queue>" );
  12305. std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR> videoFormatProperties;
  12306. uint32_t videoFormatPropertyCount;
  12307. VkResult result;
  12308. do
  12309. {
  12310. result = getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12311. reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
  12312. &videoFormatPropertyCount,
  12313. nullptr );
  12314. if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
  12315. {
  12316. videoFormatProperties.resize( videoFormatPropertyCount );
  12317. result =
  12318. getDispatcher()->vkGetPhysicalDeviceVideoFormatPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12319. reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
  12320. &videoFormatPropertyCount,
  12321. reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
  12322. }
  12323. } while ( result == VK_INCOMPLETE );
  12324. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
  12325. VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
  12326. if ( videoFormatPropertyCount < videoFormatProperties.size() )
  12327. {
  12328. videoFormatProperties.resize( videoFormatPropertyCount );
  12329. }
  12330. return videoFormatProperties;
  12331. }
  12332. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR
  12333. Device::createVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR const & createInfo,
  12334. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12335. {
  12336. return VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR( *this, createInfo, allocator );
  12337. }
  12338. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> VideoSessionKHR::getMemoryRequirements() const
  12339. {
  12340. VULKAN_HPP_ASSERT( getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR &&
  12341. "Function <vkGetVideoSessionMemoryRequirementsKHR> requires <VK_KHR_video_queue>" );
  12342. std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR> memoryRequirements;
  12343. uint32_t memoryRequirementsCount;
  12344. VkResult result;
  12345. do
  12346. {
  12347. result = getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR(
  12348. static_cast<VkDevice>( m_device ), static_cast<VkVideoSessionKHR>( m_videoSession ), &memoryRequirementsCount, nullptr );
  12349. if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
  12350. {
  12351. memoryRequirements.resize( memoryRequirementsCount );
  12352. result =
  12353. getDispatcher()->vkGetVideoSessionMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  12354. static_cast<VkVideoSessionKHR>( m_videoSession ),
  12355. &memoryRequirementsCount,
  12356. reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
  12357. }
  12358. } while ( result == VK_INCOMPLETE );
  12359. VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
  12360. if ( memoryRequirementsCount < memoryRequirements.size() )
  12361. {
  12362. memoryRequirements.resize( memoryRequirementsCount );
  12363. }
  12364. return memoryRequirements;
  12365. }
  12366. VULKAN_HPP_INLINE void VideoSessionKHR::bindMemory(
  12367. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos ) const
  12368. {
  12369. VULKAN_HPP_ASSERT( getDispatcher()->vkBindVideoSessionMemoryKHR && "Function <vkBindVideoSessionMemoryKHR> requires <VK_KHR_video_queue>" );
  12370. VkResult result =
  12371. getDispatcher()->vkBindVideoSessionMemoryKHR( static_cast<VkDevice>( m_device ),
  12372. static_cast<VkVideoSessionKHR>( m_videoSession ),
  12373. bindSessionMemoryInfos.size(),
  12374. reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
  12375. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionKHR::bindMemory" );
  12376. }
  12377. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR
  12378. Device::createVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR const & createInfo,
  12379. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12380. {
  12381. return VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR( *this, createInfo, allocator );
  12382. }
  12383. VULKAN_HPP_INLINE void VideoSessionParametersKHR::update( const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo ) const
  12384. {
  12385. VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateVideoSessionParametersKHR && "Function <vkUpdateVideoSessionParametersKHR> requires <VK_KHR_video_queue>" );
  12386. VkResult result = getDispatcher()->vkUpdateVideoSessionParametersKHR( static_cast<VkDevice>( m_device ),
  12387. static_cast<VkVideoSessionParametersKHR>( m_videoSessionParameters ),
  12388. reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
  12389. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::VideoSessionParametersKHR::update" );
  12390. }
  12391. VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo ) const VULKAN_HPP_NOEXCEPT
  12392. {
  12393. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginVideoCodingKHR && "Function <vkCmdBeginVideoCodingKHR> requires <VK_KHR_video_queue>" );
  12394. getDispatcher()->vkCmdBeginVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12395. reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
  12396. }
  12397. VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo ) const VULKAN_HPP_NOEXCEPT
  12398. {
  12399. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndVideoCodingKHR && "Function <vkCmdEndVideoCodingKHR> requires <VK_KHR_video_queue>" );
  12400. getDispatcher()->vkCmdEndVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12401. reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
  12402. }
  12403. VULKAN_HPP_INLINE void
  12404. CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo ) const VULKAN_HPP_NOEXCEPT
  12405. {
  12406. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdControlVideoCodingKHR && "Function <vkCmdControlVideoCodingKHR> requires <VK_KHR_video_queue>" );
  12407. getDispatcher()->vkCmdControlVideoCodingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12408. reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
  12409. }
  12410. //=== VK_KHR_video_decode_queue ===
  12411. VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo ) const VULKAN_HPP_NOEXCEPT
  12412. {
  12413. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecodeVideoKHR && "Function <vkCmdDecodeVideoKHR> requires <VK_KHR_video_decode_queue>" );
  12414. getDispatcher()->vkCmdDecodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
  12415. }
  12416. //=== VK_EXT_transform_feedback ===
  12417. VULKAN_HPP_INLINE void
  12418. CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
  12419. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
  12420. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
  12421. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes ) const
  12422. {
  12423. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT &&
  12424. "Function <vkCmdBindTransformFeedbackBuffersEXT> requires <VK_EXT_transform_feedback>" );
  12425. if ( buffers.size() != offsets.size() )
  12426. {
  12427. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
  12428. }
  12429. if ( !sizes.empty() && buffers.size() != sizes.size() )
  12430. {
  12431. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
  12432. }
  12433. getDispatcher()->vkCmdBindTransformFeedbackBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12434. firstBinding,
  12435. buffers.size(),
  12436. reinterpret_cast<const VkBuffer *>( buffers.data() ),
  12437. reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
  12438. reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
  12439. }
  12440. VULKAN_HPP_INLINE void
  12441. CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
  12442. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
  12443. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
  12444. {
  12445. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginTransformFeedbackEXT && "Function <vkCmdBeginTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
  12446. if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  12447. {
  12448. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  12449. }
  12450. getDispatcher()->vkCmdBeginTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12451. firstCounterBuffer,
  12452. counterBuffers.size(),
  12453. reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
  12454. reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  12455. }
  12456. VULKAN_HPP_INLINE void
  12457. CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
  12458. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
  12459. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets ) const
  12460. {
  12461. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndTransformFeedbackEXT && "Function <vkCmdEndTransformFeedbackEXT> requires <VK_EXT_transform_feedback>" );
  12462. if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  12463. {
  12464. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  12465. }
  12466. getDispatcher()->vkCmdEndTransformFeedbackEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12467. firstCounterBuffer,
  12468. counterBuffers.size(),
  12469. reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
  12470. reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  12471. }
  12472. VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  12473. uint32_t query,
  12474. VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
  12475. uint32_t index ) const VULKAN_HPP_NOEXCEPT
  12476. {
  12477. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginQueryIndexedEXT && "Function <vkCmdBeginQueryIndexedEXT> requires <VK_EXT_transform_feedback>" );
  12478. getDispatcher()->vkCmdBeginQueryIndexedEXT(
  12479. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
  12480. }
  12481. VULKAN_HPP_INLINE void
  12482. CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
  12483. {
  12484. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndQueryIndexedEXT && "Function <vkCmdEndQueryIndexedEXT> requires <VK_EXT_transform_feedback>" );
  12485. getDispatcher()->vkCmdEndQueryIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkQueryPool>( queryPool ), query, index );
  12486. }
  12487. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
  12488. uint32_t firstInstance,
  12489. VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
  12490. VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
  12491. uint32_t counterOffset,
  12492. uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
  12493. {
  12494. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectByteCountEXT && "Function <vkCmdDrawIndirectByteCountEXT> requires <VK_EXT_transform_feedback>" );
  12495. getDispatcher()->vkCmdDrawIndirectByteCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12496. instanceCount,
  12497. firstInstance,
  12498. static_cast<VkBuffer>( counterBuffer ),
  12499. static_cast<VkDeviceSize>( counterBufferOffset ),
  12500. counterOffset,
  12501. vertexStride );
  12502. }
  12503. //=== VK_NVX_binary_import ===
  12504. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX
  12505. Device::createCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX const & createInfo,
  12506. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12507. {
  12508. return VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX( *this, createInfo, allocator );
  12509. }
  12510. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX
  12511. Device::createCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX const & createInfo,
  12512. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12513. {
  12514. return VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX( *this, createInfo, allocator );
  12515. }
  12516. VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo ) const VULKAN_HPP_NOEXCEPT
  12517. {
  12518. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCuLaunchKernelNVX && "Function <vkCmdCuLaunchKernelNVX> requires <VK_NVX_binary_import>" );
  12519. getDispatcher()->vkCmdCuLaunchKernelNVX( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
  12520. }
  12521. //=== VK_NVX_image_view_handle ===
  12522. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
  12523. Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info ) const VULKAN_HPP_NOEXCEPT
  12524. {
  12525. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewHandleNVX && "Function <vkGetImageViewHandleNVX> requires <VK_NVX_image_view_handle>" );
  12526. uint32_t result =
  12527. getDispatcher()->vkGetImageViewHandleNVX( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
  12528. return result;
  12529. }
  12530. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ImageView::getAddressNVX() const
  12531. {
  12532. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewAddressNVX && "Function <vkGetImageViewAddressNVX> requires <VK_NVX_image_view_handle>" );
  12533. VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
  12534. VkResult result = getDispatcher()->vkGetImageViewAddressNVX(
  12535. static_cast<VkDevice>( m_device ), static_cast<VkImageView>( m_imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
  12536. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ImageView::getAddressNVX" );
  12537. return properties;
  12538. }
  12539. //=== VK_AMD_draw_indirect_count ===
  12540. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
  12541. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  12542. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  12543. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  12544. uint32_t maxDrawCount,
  12545. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  12546. {
  12547. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountAMD &&
  12548. "Function <vkCmdDrawIndirectCountAMD> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
  12549. getDispatcher()->vkCmdDrawIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12550. static_cast<VkBuffer>( buffer ),
  12551. static_cast<VkDeviceSize>( offset ),
  12552. static_cast<VkBuffer>( countBuffer ),
  12553. static_cast<VkDeviceSize>( countBufferOffset ),
  12554. maxDrawCount,
  12555. stride );
  12556. }
  12557. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
  12558. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  12559. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  12560. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  12561. uint32_t maxDrawCount,
  12562. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  12563. {
  12564. VULKAN_HPP_ASSERT(
  12565. getDispatcher()->vkCmdDrawIndexedIndirectCountAMD &&
  12566. "Function <vkCmdDrawIndexedIndirectCountAMD> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
  12567. getDispatcher()->vkCmdDrawIndexedIndirectCountAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
  12568. static_cast<VkBuffer>( buffer ),
  12569. static_cast<VkDeviceSize>( offset ),
  12570. static_cast<VkBuffer>( countBuffer ),
  12571. static_cast<VkDeviceSize>( countBufferOffset ),
  12572. maxDrawCount,
  12573. stride );
  12574. }
  12575. //=== VK_AMD_shader_info ===
  12576. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> Pipeline::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
  12577. VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType ) const
  12578. {
  12579. VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderInfoAMD && "Function <vkGetShaderInfoAMD> requires <VK_AMD_shader_info>" );
  12580. std::vector<uint8_t> info;
  12581. size_t infoSize;
  12582. VkResult result;
  12583. do
  12584. {
  12585. result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
  12586. static_cast<VkPipeline>( m_pipeline ),
  12587. static_cast<VkShaderStageFlagBits>( shaderStage ),
  12588. static_cast<VkShaderInfoTypeAMD>( infoType ),
  12589. &infoSize,
  12590. nullptr );
  12591. if ( ( result == VK_SUCCESS ) && infoSize )
  12592. {
  12593. info.resize( infoSize );
  12594. result = getDispatcher()->vkGetShaderInfoAMD( static_cast<VkDevice>( m_device ),
  12595. static_cast<VkPipeline>( m_pipeline ),
  12596. static_cast<VkShaderStageFlagBits>( shaderStage ),
  12597. static_cast<VkShaderInfoTypeAMD>( infoType ),
  12598. &infoSize,
  12599. reinterpret_cast<void *>( info.data() ) );
  12600. }
  12601. } while ( result == VK_INCOMPLETE );
  12602. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getShaderInfoAMD" );
  12603. VULKAN_HPP_ASSERT( infoSize <= info.size() );
  12604. if ( infoSize < info.size() )
  12605. {
  12606. info.resize( infoSize );
  12607. }
  12608. return info;
  12609. }
  12610. //=== VK_KHR_dynamic_rendering ===
  12611. VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
  12612. {
  12613. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderingKHR &&
  12614. "Function <vkCmdBeginRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
  12615. getDispatcher()->vkCmdBeginRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  12616. }
  12617. VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR() const VULKAN_HPP_NOEXCEPT
  12618. {
  12619. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderingKHR && "Function <vkCmdEndRenderingKHR> requires <VK_KHR_dynamic_rendering> or <VK_VERSION_1_3>" );
  12620. getDispatcher()->vkCmdEndRenderingKHR( static_cast<VkCommandBuffer>( m_commandBuffer ) );
  12621. }
  12622. # if defined( VK_USE_PLATFORM_GGP )
  12623. //=== VK_GGP_stream_descriptor_surface ===
  12624. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12625. Instance::createStreamDescriptorSurfaceGGP( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const & createInfo,
  12626. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12627. {
  12628. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12629. }
  12630. # endif /*VK_USE_PLATFORM_GGP*/
  12631. //=== VK_NV_external_memory_capabilities ===
  12632. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV
  12633. PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
  12634. VULKAN_HPP_NAMESPACE::ImageType type,
  12635. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  12636. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  12637. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
  12638. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType ) const
  12639. {
  12640. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV &&
  12641. "Function <vkGetPhysicalDeviceExternalImageFormatPropertiesNV> requires <VK_NV_external_memory_capabilities>" );
  12642. VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
  12643. VkResult result = getDispatcher()->vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
  12644. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12645. static_cast<VkFormat>( format ),
  12646. static_cast<VkImageType>( type ),
  12647. static_cast<VkImageTiling>( tiling ),
  12648. static_cast<VkImageUsageFlags>( usage ),
  12649. static_cast<VkImageCreateFlags>( flags ),
  12650. static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
  12651. reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
  12652. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
  12653. return externalImageFormatProperties;
  12654. }
  12655. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  12656. //=== VK_NV_external_memory_win32 ===
  12657. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE DeviceMemory::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType ) const
  12658. {
  12659. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleNV && "Function <vkGetMemoryWin32HandleNV> requires <VK_NV_external_memory_win32>" );
  12660. HANDLE handle;
  12661. VkResult result = getDispatcher()->vkGetMemoryWin32HandleNV(
  12662. static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
  12663. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DeviceMemory::getMemoryWin32HandleNV" );
  12664. return handle;
  12665. }
  12666. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12667. //=== VK_KHR_get_physical_device_properties2 ===
  12668. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
  12669. {
  12670. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
  12671. "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12672. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
  12673. getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12674. reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
  12675. return features;
  12676. }
  12677. template <typename X, typename Y, typename... Z>
  12678. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR() const VULKAN_HPP_NOEXCEPT
  12679. {
  12680. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFeatures2KHR &&
  12681. "Function <vkGetPhysicalDeviceFeatures2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12682. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  12683. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
  12684. getDispatcher()->vkGetPhysicalDeviceFeatures2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12685. reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
  12686. return structureChain;
  12687. }
  12688. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
  12689. {
  12690. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
  12691. "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12692. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
  12693. getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12694. reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  12695. return properties;
  12696. }
  12697. template <typename X, typename Y, typename... Z>
  12698. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR() const VULKAN_HPP_NOEXCEPT
  12699. {
  12700. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceProperties2KHR &&
  12701. "Function <vkGetPhysicalDeviceProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12702. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  12703. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
  12704. getDispatcher()->vkGetPhysicalDeviceProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12705. reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  12706. return structureChain;
  12707. }
  12708. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
  12709. PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
  12710. {
  12711. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
  12712. "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12713. VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
  12714. getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
  12715. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
  12716. return formatProperties;
  12717. }
  12718. template <typename X, typename Y, typename... Z>
  12719. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  12720. PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format ) const VULKAN_HPP_NOEXCEPT
  12721. {
  12722. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR &&
  12723. "Function <vkGetPhysicalDeviceFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12724. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  12725. VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
  12726. getDispatcher()->vkGetPhysicalDeviceFormatProperties2KHR(
  12727. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
  12728. return structureChain;
  12729. }
  12730. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageFormatProperties2
  12731. PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
  12732. {
  12733. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
  12734. "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12735. VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
  12736. VkResult result =
  12737. getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12738. reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
  12739. reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
  12740. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  12741. return imageFormatProperties;
  12742. }
  12743. template <typename X, typename Y, typename... Z>
  12744. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  12745. PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo ) const
  12746. {
  12747. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR &&
  12748. "Function <vkGetPhysicalDeviceImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12749. StructureChain<X, Y, Z...> structureChain;
  12750. VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
  12751. VkResult result =
  12752. getDispatcher()->vkGetPhysicalDeviceImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12753. reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
  12754. reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
  12755. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  12756. return structureChain;
  12757. }
  12758. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> PhysicalDevice::getQueueFamilyProperties2KHR() const
  12759. {
  12760. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
  12761. "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12762. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  12763. uint32_t queueFamilyPropertyCount;
  12764. getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
  12765. queueFamilyProperties.resize( queueFamilyPropertyCount );
  12766. getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12767. &queueFamilyPropertyCount,
  12768. reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  12769. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  12770. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  12771. {
  12772. queueFamilyProperties.resize( queueFamilyPropertyCount );
  12773. }
  12774. return queueFamilyProperties;
  12775. }
  12776. template <typename StructureChain>
  12777. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain> PhysicalDevice::getQueueFamilyProperties2KHR() const
  12778. {
  12779. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR &&
  12780. "Function <vkGetPhysicalDeviceQueueFamilyProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12781. std::vector<StructureChain> structureChains;
  12782. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  12783. uint32_t queueFamilyPropertyCount;
  12784. getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &queueFamilyPropertyCount, nullptr );
  12785. structureChains.resize( queueFamilyPropertyCount );
  12786. queueFamilyProperties.resize( queueFamilyPropertyCount );
  12787. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  12788. {
  12789. queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  12790. }
  12791. getDispatcher()->vkGetPhysicalDeviceQueueFamilyProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12792. &queueFamilyPropertyCount,
  12793. reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  12794. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  12795. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  12796. {
  12797. structureChains.resize( queueFamilyPropertyCount );
  12798. }
  12799. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  12800. {
  12801. structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  12802. }
  12803. return structureChains;
  12804. }
  12805. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  12806. PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
  12807. {
  12808. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
  12809. "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12810. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  12811. getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12812. reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  12813. return memoryProperties;
  12814. }
  12815. template <typename X, typename Y, typename... Z>
  12816. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR() const VULKAN_HPP_NOEXCEPT
  12817. {
  12818. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR &&
  12819. "Function <vkGetPhysicalDeviceMemoryProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12820. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  12821. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
  12822. structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
  12823. getDispatcher()->vkGetPhysicalDeviceMemoryProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12824. reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  12825. return structureChain;
  12826. }
  12827. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>
  12828. PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo ) const
  12829. {
  12830. VULKAN_HPP_ASSERT(
  12831. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR &&
  12832. "Function <vkGetPhysicalDeviceSparseImageFormatProperties2KHR> requires <VK_KHR_get_physical_device_properties2> or <VK_VERSION_1_1>" );
  12833. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2> properties;
  12834. uint32_t propertyCount;
  12835. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12836. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  12837. &propertyCount,
  12838. nullptr );
  12839. properties.resize( propertyCount );
  12840. getDispatcher()->vkGetPhysicalDeviceSparseImageFormatProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12841. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  12842. &propertyCount,
  12843. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  12844. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  12845. if ( propertyCount < properties.size() )
  12846. {
  12847. properties.resize( propertyCount );
  12848. }
  12849. return properties;
  12850. }
  12851. //=== VK_KHR_device_group ===
  12852. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags
  12853. Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex ) const VULKAN_HPP_NOEXCEPT
  12854. {
  12855. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR &&
  12856. "Function <vkGetDeviceGroupPeerMemoryFeaturesKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
  12857. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  12858. getDispatcher()->vkGetDeviceGroupPeerMemoryFeaturesKHR( static_cast<VkDevice>( m_device ),
  12859. heapIndex,
  12860. localDeviceIndex,
  12861. remoteDeviceIndex,
  12862. reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  12863. return peerMemoryFeatures;
  12864. }
  12865. VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
  12866. {
  12867. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDeviceMaskKHR && "Function <vkCmdSetDeviceMaskKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
  12868. getDispatcher()->vkCmdSetDeviceMaskKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), deviceMask );
  12869. }
  12870. VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
  12871. uint32_t baseGroupY,
  12872. uint32_t baseGroupZ,
  12873. uint32_t groupCountX,
  12874. uint32_t groupCountY,
  12875. uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
  12876. {
  12877. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchBaseKHR && "Function <vkCmdDispatchBaseKHR> requires <VK_KHR_device_group> or <VK_VERSION_1_1>" );
  12878. getDispatcher()->vkCmdDispatchBaseKHR(
  12879. static_cast<VkCommandBuffer>( m_commandBuffer ), baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  12880. }
  12881. # if defined( VK_USE_PLATFORM_VI_NN )
  12882. //=== VK_NN_vi_surface ===
  12883. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  12884. Instance::createViSurfaceNN( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const & createInfo,
  12885. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  12886. {
  12887. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  12888. }
  12889. # endif /*VK_USE_PLATFORM_VI_NN*/
  12890. //=== VK_KHR_maintenance1 ===
  12891. VULKAN_HPP_INLINE void CommandPool::trimKHR( VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
  12892. {
  12893. VULKAN_HPP_ASSERT( getDispatcher()->vkTrimCommandPoolKHR && "Function <vkTrimCommandPoolKHR> requires <VK_KHR_maintenance1> or <VK_VERSION_1_1>" );
  12894. getDispatcher()->vkTrimCommandPoolKHR(
  12895. static_cast<VkDevice>( m_device ), static_cast<VkCommandPool>( m_commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  12896. }
  12897. //=== VK_KHR_device_group_creation ===
  12898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> Instance::enumeratePhysicalDeviceGroupsKHR() const
  12899. {
  12900. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR &&
  12901. "Function <vkEnumeratePhysicalDeviceGroupsKHR> requires <VK_KHR_device_group_creation> or <VK_VERSION_1_1>" );
  12902. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties> physicalDeviceGroupProperties;
  12903. uint32_t physicalDeviceGroupCount;
  12904. VkResult result;
  12905. do
  12906. {
  12907. result = getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ), &physicalDeviceGroupCount, nullptr );
  12908. if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
  12909. {
  12910. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  12911. result =
  12912. getDispatcher()->vkEnumeratePhysicalDeviceGroupsKHR( static_cast<VkInstance>( m_instance ),
  12913. &physicalDeviceGroupCount,
  12914. reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
  12915. }
  12916. } while ( result == VK_INCOMPLETE );
  12917. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
  12918. VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  12919. if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
  12920. {
  12921. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  12922. }
  12923. return physicalDeviceGroupProperties;
  12924. }
  12925. //=== VK_KHR_external_memory_capabilities ===
  12926. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR(
  12927. const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo ) const VULKAN_HPP_NOEXCEPT
  12928. {
  12929. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR &&
  12930. "Function <vkGetPhysicalDeviceExternalBufferPropertiesKHR> requires <VK_KHR_external_memory_capabilities> or <VK_VERSION_1_1>" );
  12931. VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
  12932. getDispatcher()->vkGetPhysicalDeviceExternalBufferPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12933. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
  12934. reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
  12935. return externalBufferProperties;
  12936. }
  12937. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  12938. //=== VK_KHR_external_memory_win32 ===
  12939. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
  12940. Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo ) const
  12941. {
  12942. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandleKHR && "Function <vkGetMemoryWin32HandleKHR> requires <VK_KHR_external_memory_win32>" );
  12943. HANDLE handle;
  12944. VkResult result = getDispatcher()->vkGetMemoryWin32HandleKHR(
  12945. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  12946. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
  12947. return handle;
  12948. }
  12949. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR
  12950. Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle ) const
  12951. {
  12952. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR &&
  12953. "Function <vkGetMemoryWin32HandlePropertiesKHR> requires <VK_KHR_external_memory_win32>" );
  12954. VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
  12955. VkResult result =
  12956. getDispatcher()->vkGetMemoryWin32HandlePropertiesKHR( static_cast<VkDevice>( m_device ),
  12957. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  12958. handle,
  12959. reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
  12960. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
  12961. return memoryWin32HandleProperties;
  12962. }
  12963. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12964. //=== VK_KHR_external_memory_fd ===
  12965. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo ) const
  12966. {
  12967. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdKHR && "Function <vkGetMemoryFdKHR> requires <VK_KHR_external_memory_fd>" );
  12968. int fd;
  12969. VkResult result =
  12970. getDispatcher()->vkGetMemoryFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
  12971. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  12972. return fd;
  12973. }
  12974. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR
  12975. Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd ) const
  12976. {
  12977. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryFdPropertiesKHR && "Function <vkGetMemoryFdPropertiesKHR> requires <VK_KHR_external_memory_fd>" );
  12978. VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
  12979. VkResult result = getDispatcher()->vkGetMemoryFdPropertiesKHR( static_cast<VkDevice>( m_device ),
  12980. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  12981. fd,
  12982. reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
  12983. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
  12984. return memoryFdProperties;
  12985. }
  12986. //=== VK_KHR_external_semaphore_capabilities ===
  12987. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR(
  12988. const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo ) const VULKAN_HPP_NOEXCEPT
  12989. {
  12990. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR &&
  12991. "Function <vkGetPhysicalDeviceExternalSemaphorePropertiesKHR> requires <VK_KHR_external_semaphore_capabilities> or <VK_VERSION_1_1>" );
  12992. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
  12993. getDispatcher()->vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
  12994. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  12995. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
  12996. reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
  12997. return externalSemaphoreProperties;
  12998. }
  12999. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  13000. //=== VK_KHR_external_semaphore_win32 ===
  13001. VULKAN_HPP_INLINE void
  13002. Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo ) const
  13003. {
  13004. VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreWin32HandleKHR &&
  13005. "Function <vkImportSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
  13006. VkResult result = getDispatcher()->vkImportSemaphoreWin32HandleKHR(
  13007. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
  13008. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
  13009. }
  13010. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
  13011. Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo ) const
  13012. {
  13013. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreWin32HandleKHR &&
  13014. "Function <vkGetSemaphoreWin32HandleKHR> requires <VK_KHR_external_semaphore_win32>" );
  13015. HANDLE handle;
  13016. VkResult result = getDispatcher()->vkGetSemaphoreWin32HandleKHR(
  13017. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  13018. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
  13019. return handle;
  13020. }
  13021. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  13022. //=== VK_KHR_external_semaphore_fd ===
  13023. VULKAN_HPP_INLINE void Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo ) const
  13024. {
  13025. VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreFdKHR && "Function <vkImportSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
  13026. VkResult result = getDispatcher()->vkImportSemaphoreFdKHR( static_cast<VkDevice>( m_device ),
  13027. reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
  13028. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
  13029. }
  13030. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo ) const
  13031. {
  13032. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreFdKHR && "Function <vkGetSemaphoreFdKHR> requires <VK_KHR_external_semaphore_fd>" );
  13033. int fd;
  13034. VkResult result =
  13035. getDispatcher()->vkGetSemaphoreFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
  13036. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  13037. return fd;
  13038. }
  13039. //=== VK_KHR_push_descriptor ===
  13040. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR(
  13041. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  13042. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  13043. uint32_t set,
  13044. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites ) const VULKAN_HPP_NOEXCEPT
  13045. {
  13046. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetKHR && "Function <vkCmdPushDescriptorSetKHR> requires <VK_KHR_push_descriptor>" );
  13047. getDispatcher()->vkCmdPushDescriptorSetKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13048. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  13049. static_cast<VkPipelineLayout>( layout ),
  13050. set,
  13051. descriptorWrites.size(),
  13052. reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  13053. }
  13054. template <typename DataType>
  13055. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  13056. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  13057. uint32_t set,
  13058. DataType const & data ) const VULKAN_HPP_NOEXCEPT
  13059. {
  13060. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR &&
  13061. "Function <vkCmdPushDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_KHR_push_descriptor>" );
  13062. getDispatcher()->vkCmdPushDescriptorSetWithTemplateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13063. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  13064. static_cast<VkPipelineLayout>( layout ),
  13065. set,
  13066. reinterpret_cast<const void *>( &data ) );
  13067. }
  13068. //=== VK_EXT_conditional_rendering ===
  13069. VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT(
  13070. const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
  13071. {
  13072. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginConditionalRenderingEXT &&
  13073. "Function <vkCmdBeginConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" );
  13074. getDispatcher()->vkCmdBeginConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13075. reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  13076. }
  13077. VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT() const VULKAN_HPP_NOEXCEPT
  13078. {
  13079. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndConditionalRenderingEXT &&
  13080. "Function <vkCmdEndConditionalRenderingEXT> requires <VK_EXT_conditional_rendering>" );
  13081. getDispatcher()->vkCmdEndConditionalRenderingEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
  13082. }
  13083. //=== VK_KHR_descriptor_update_template ===
  13084. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate
  13085. Device::createDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const & createInfo,
  13086. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13087. {
  13088. return VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate( *this, createInfo, allocator );
  13089. }
  13090. VULKAN_HPP_INLINE void
  13091. Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  13092. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
  13093. {
  13094. VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR &&
  13095. "Function <vkDestroyDescriptorUpdateTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
  13096. getDispatcher()->vkDestroyDescriptorUpdateTemplateKHR(
  13097. static_cast<VkDevice>( m_device ),
  13098. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  13099. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13100. }
  13101. template <typename DataType>
  13102. VULKAN_HPP_INLINE void DescriptorSet::updateWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  13103. DataType const & data ) const VULKAN_HPP_NOEXCEPT
  13104. {
  13105. VULKAN_HPP_ASSERT( getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR &&
  13106. "Function <vkUpdateDescriptorSetWithTemplateKHR> requires <VK_KHR_descriptor_update_template> or <VK_VERSION_1_1>" );
  13107. getDispatcher()->vkUpdateDescriptorSetWithTemplateKHR( static_cast<VkDevice>( m_device ),
  13108. static_cast<VkDescriptorSet>( m_descriptorSet ),
  13109. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  13110. reinterpret_cast<const void *>( &data ) );
  13111. }
  13112. //=== VK_NV_clip_space_w_scaling ===
  13113. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV(
  13114. uint32_t firstViewport,
  13115. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings ) const VULKAN_HPP_NOEXCEPT
  13116. {
  13117. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingNV && "Function <vkCmdSetViewportWScalingNV> requires <VK_NV_clip_space_w_scaling>" );
  13118. getDispatcher()->vkCmdSetViewportWScalingNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13119. firstViewport,
  13120. viewportWScalings.size(),
  13121. reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  13122. }
  13123. # if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  13124. //=== VK_EXT_acquire_xlib_display ===
  13125. VULKAN_HPP_INLINE void PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
  13126. {
  13127. VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireXlibDisplayEXT && "Function <vkAcquireXlibDisplayEXT> requires <VK_EXT_acquire_xlib_display>" );
  13128. VkResult result =
  13129. getDispatcher()->vkAcquireXlibDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &dpy, static_cast<VkDisplayKHR>( display ) );
  13130. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
  13131. }
  13132. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getRandROutputDisplayEXT( Display & dpy,
  13133. RROutput rrOutput ) const
  13134. {
  13135. return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, dpy, rrOutput );
  13136. }
  13137. # endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  13138. //=== VK_EXT_display_surface_counter ===
  13139. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT
  13140. PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface ) const
  13141. {
  13142. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT &&
  13143. "Function <vkGetPhysicalDeviceSurfaceCapabilities2EXT> requires <VK_EXT_display_surface_counter>" );
  13144. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
  13145. VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13146. static_cast<VkSurfaceKHR>( surface ),
  13147. reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
  13148. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
  13149. return surfaceCapabilities;
  13150. }
  13151. //=== VK_EXT_display_control ===
  13152. VULKAN_HPP_INLINE void Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  13153. const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo ) const
  13154. {
  13155. VULKAN_HPP_ASSERT( getDispatcher()->vkDisplayPowerControlEXT && "Function <vkDisplayPowerControlEXT> requires <VK_EXT_display_control>" );
  13156. VkResult result = getDispatcher()->vkDisplayPowerControlEXT(
  13157. static_cast<VkDevice>( m_device ), static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
  13158. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  13159. }
  13160. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
  13161. Device::registerEventEXT( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const & deviceEventInfo,
  13162. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13163. {
  13164. return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, deviceEventInfo, allocator );
  13165. }
  13166. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Fence
  13167. Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DisplayKHR const & display,
  13168. VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const & displayEventInfo,
  13169. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13170. {
  13171. return VULKAN_HPP_RAII_NAMESPACE::Fence( *this, display, displayEventInfo, allocator );
  13172. }
  13173. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t SwapchainKHR::getCounterEXT( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter ) const
  13174. {
  13175. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainCounterEXT && "Function <vkGetSwapchainCounterEXT> requires <VK_EXT_display_control>" );
  13176. uint64_t counterValue;
  13177. VkResult result = getDispatcher()->vkGetSwapchainCounterEXT(
  13178. static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
  13179. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getCounterEXT" );
  13180. return counterValue;
  13181. }
  13182. //=== VK_GOOGLE_display_timing ===
  13183. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE SwapchainKHR::getRefreshCycleDurationGOOGLE() const
  13184. {
  13185. VULKAN_HPP_ASSERT( getDispatcher()->vkGetRefreshCycleDurationGOOGLE && "Function <vkGetRefreshCycleDurationGOOGLE> requires <VK_GOOGLE_display_timing>" );
  13186. VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
  13187. VkResult result = getDispatcher()->vkGetRefreshCycleDurationGOOGLE( static_cast<VkDevice>( m_device ),
  13188. static_cast<VkSwapchainKHR>( m_swapchain ),
  13189. reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
  13190. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getRefreshCycleDurationGOOGLE" );
  13191. return displayTimingProperties;
  13192. }
  13193. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> SwapchainKHR::getPastPresentationTimingGOOGLE() const
  13194. {
  13195. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPastPresentationTimingGOOGLE &&
  13196. "Function <vkGetPastPresentationTimingGOOGLE> requires <VK_GOOGLE_display_timing>" );
  13197. std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE> presentationTimings;
  13198. uint32_t presentationTimingCount;
  13199. VkResult result;
  13200. do
  13201. {
  13202. result = getDispatcher()->vkGetPastPresentationTimingGOOGLE(
  13203. static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), &presentationTimingCount, nullptr );
  13204. if ( ( result == VK_SUCCESS ) && presentationTimingCount )
  13205. {
  13206. presentationTimings.resize( presentationTimingCount );
  13207. result = getDispatcher()->vkGetPastPresentationTimingGOOGLE( static_cast<VkDevice>( m_device ),
  13208. static_cast<VkSwapchainKHR>( m_swapchain ),
  13209. &presentationTimingCount,
  13210. reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
  13211. }
  13212. } while ( result == VK_INCOMPLETE );
  13213. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getPastPresentationTimingGOOGLE" );
  13214. VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
  13215. if ( presentationTimingCount < presentationTimings.size() )
  13216. {
  13217. presentationTimings.resize( presentationTimingCount );
  13218. }
  13219. return presentationTimings;
  13220. }
  13221. //=== VK_EXT_discard_rectangles ===
  13222. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT(
  13223. uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles ) const VULKAN_HPP_NOEXCEPT
  13224. {
  13225. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEXT && "Function <vkCmdSetDiscardRectangleEXT> requires <VK_EXT_discard_rectangles>" );
  13226. getDispatcher()->vkCmdSetDiscardRectangleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13227. firstDiscardRectangle,
  13228. discardRectangles.size(),
  13229. reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  13230. }
  13231. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable ) const VULKAN_HPP_NOEXCEPT
  13232. {
  13233. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleEnableEXT &&
  13234. "Function <vkCmdSetDiscardRectangleEnableEXT> requires <VK_EXT_discard_rectangles>" );
  13235. getDispatcher()->vkCmdSetDiscardRectangleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( discardRectangleEnable ) );
  13236. }
  13237. VULKAN_HPP_INLINE void
  13238. CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode ) const VULKAN_HPP_NOEXCEPT
  13239. {
  13240. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDiscardRectangleModeEXT &&
  13241. "Function <vkCmdSetDiscardRectangleModeEXT> requires <VK_EXT_discard_rectangles>" );
  13242. getDispatcher()->vkCmdSetDiscardRectangleModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13243. static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
  13244. }
  13245. //=== VK_EXT_hdr_metadata ===
  13246. VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
  13247. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata ) const
  13248. {
  13249. VULKAN_HPP_ASSERT( getDispatcher()->vkSetHdrMetadataEXT && "Function <vkSetHdrMetadataEXT> requires <VK_EXT_hdr_metadata>" );
  13250. if ( swapchains.size() != metadata.size() )
  13251. {
  13252. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
  13253. }
  13254. getDispatcher()->vkSetHdrMetadataEXT( static_cast<VkDevice>( m_device ),
  13255. swapchains.size(),
  13256. reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
  13257. reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  13258. }
  13259. //=== VK_KHR_create_renderpass2 ===
  13260. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::RenderPass
  13261. Device::createRenderPass2KHR( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const & createInfo,
  13262. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13263. {
  13264. return VULKAN_HPP_RAII_NAMESPACE::RenderPass( *this, createInfo, allocator );
  13265. }
  13266. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
  13267. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
  13268. {
  13269. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginRenderPass2KHR &&
  13270. "Function <vkCmdBeginRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  13271. getDispatcher()->vkCmdBeginRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13272. reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ),
  13273. reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  13274. }
  13275. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  13276. const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
  13277. {
  13278. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdNextSubpass2KHR && "Function <vkCmdNextSubpass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  13279. getDispatcher()->vkCmdNextSubpass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  13280. reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ),
  13281. reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  13282. }
  13283. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo ) const VULKAN_HPP_NOEXCEPT
  13284. {
  13285. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndRenderPass2KHR &&
  13286. "Function <vkCmdEndRenderPass2KHR> requires <VK_KHR_create_renderpass2> or <VK_VERSION_1_2>" );
  13287. getDispatcher()->vkCmdEndRenderPass2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  13288. }
  13289. //=== VK_KHR_shared_presentable_image ===
  13290. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::getStatus() const
  13291. {
  13292. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSwapchainStatusKHR && "Function <vkGetSwapchainStatusKHR> requires <VK_KHR_shared_presentable_image>" );
  13293. VkResult result = getDispatcher()->vkGetSwapchainStatusKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
  13294. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13295. VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::getStatus",
  13296. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  13297. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  13298. }
  13299. //=== VK_KHR_external_fence_capabilities ===
  13300. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
  13301. PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo ) const VULKAN_HPP_NOEXCEPT
  13302. {
  13303. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR &&
  13304. "Function <vkGetPhysicalDeviceExternalFencePropertiesKHR> requires <VK_KHR_external_fence_capabilities> or <VK_VERSION_1_1>" );
  13305. VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
  13306. getDispatcher()->vkGetPhysicalDeviceExternalFencePropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13307. reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
  13308. reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
  13309. return externalFenceProperties;
  13310. }
  13311. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  13312. //=== VK_KHR_external_fence_win32 ===
  13313. VULKAN_HPP_INLINE void Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo ) const
  13314. {
  13315. VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceWin32HandleKHR && "Function <vkImportFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
  13316. VkResult result = getDispatcher()->vkImportFenceWin32HandleKHR(
  13317. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
  13318. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
  13319. }
  13320. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE HANDLE
  13321. Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo ) const
  13322. {
  13323. VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceWin32HandleKHR && "Function <vkGetFenceWin32HandleKHR> requires <VK_KHR_external_fence_win32>" );
  13324. HANDLE handle;
  13325. VkResult result = getDispatcher()->vkGetFenceWin32HandleKHR(
  13326. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  13327. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
  13328. return handle;
  13329. }
  13330. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  13331. //=== VK_KHR_external_fence_fd ===
  13332. VULKAN_HPP_INLINE void Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo ) const
  13333. {
  13334. VULKAN_HPP_ASSERT( getDispatcher()->vkImportFenceFdKHR && "Function <vkImportFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
  13335. VkResult result =
  13336. getDispatcher()->vkImportFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
  13337. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
  13338. }
  13339. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE int Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo ) const
  13340. {
  13341. VULKAN_HPP_ASSERT( getDispatcher()->vkGetFenceFdKHR && "Function <vkGetFenceFdKHR> requires <VK_KHR_external_fence_fd>" );
  13342. int fd;
  13343. VkResult result = getDispatcher()->vkGetFenceFdKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
  13344. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
  13345. return fd;
  13346. }
  13347. //=== VK_KHR_performance_query ===
  13348. VULKAN_HPP_NODISCARD
  13349. VULKAN_HPP_INLINE std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>>
  13350. PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex ) const
  13351. {
  13352. VULKAN_HPP_ASSERT( getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR &&
  13353. "Function <vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR> requires <VK_KHR_performance_query>" );
  13354. std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>> data_;
  13355. std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> & counters = data_.first;
  13356. std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR> & counterDescriptions = data_.second;
  13357. uint32_t counterCount;
  13358. VkResult result;
  13359. do
  13360. {
  13361. result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
  13362. static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &counterCount, nullptr, nullptr );
  13363. if ( ( result == VK_SUCCESS ) && counterCount )
  13364. {
  13365. counters.resize( counterCount );
  13366. counterDescriptions.resize( counterCount );
  13367. result = getDispatcher()->vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
  13368. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13369. queueFamilyIndex,
  13370. &counterCount,
  13371. reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
  13372. reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
  13373. }
  13374. } while ( result == VK_INCOMPLETE );
  13375. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13376. VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  13377. VULKAN_HPP_ASSERT( counterCount <= counters.size() );
  13378. if ( counterCount < counters.size() )
  13379. {
  13380. counters.resize( counterCount );
  13381. counterDescriptions.resize( counterCount );
  13382. }
  13383. return data_;
  13384. }
  13385. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
  13386. const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo ) const VULKAN_HPP_NOEXCEPT
  13387. {
  13388. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR &&
  13389. "Function <vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR> requires <VK_KHR_performance_query>" );
  13390. uint32_t numPasses;
  13391. getDispatcher()->vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
  13392. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13393. reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ),
  13394. &numPasses );
  13395. return numPasses;
  13396. }
  13397. VULKAN_HPP_INLINE void Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info ) const
  13398. {
  13399. VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireProfilingLockKHR && "Function <vkAcquireProfilingLockKHR> requires <VK_KHR_performance_query>" );
  13400. VkResult result =
  13401. getDispatcher()->vkAcquireProfilingLockKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
  13402. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
  13403. }
  13404. VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR() const VULKAN_HPP_NOEXCEPT
  13405. {
  13406. VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseProfilingLockKHR && "Function <vkReleaseProfilingLockKHR> requires <VK_KHR_performance_query>" );
  13407. getDispatcher()->vkReleaseProfilingLockKHR( static_cast<VkDevice>( m_device ) );
  13408. }
  13409. //=== VK_KHR_get_surface_capabilities2 ===
  13410. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR
  13411. PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
  13412. {
  13413. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
  13414. "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
  13415. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
  13416. VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13417. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13418. reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
  13419. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  13420. return surfaceCapabilities;
  13421. }
  13422. template <typename X, typename Y, typename... Z>
  13423. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  13424. PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
  13425. {
  13426. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR &&
  13427. "Function <vkGetPhysicalDeviceSurfaceCapabilities2KHR> requires <VK_KHR_get_surface_capabilities2>" );
  13428. StructureChain<X, Y, Z...> structureChain;
  13429. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
  13430. VkResult result = getDispatcher()->vkGetPhysicalDeviceSurfaceCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13431. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13432. reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
  13433. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  13434. return structureChain;
  13435. }
  13436. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>
  13437. PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
  13438. {
  13439. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
  13440. "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
  13441. std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
  13442. uint32_t surfaceFormatCount;
  13443. VkResult result;
  13444. do
  13445. {
  13446. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13447. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13448. &surfaceFormatCount,
  13449. nullptr );
  13450. if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
  13451. {
  13452. surfaceFormats.resize( surfaceFormatCount );
  13453. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13454. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13455. &surfaceFormatCount,
  13456. reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
  13457. }
  13458. } while ( result == VK_INCOMPLETE );
  13459. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  13460. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  13461. if ( surfaceFormatCount < surfaceFormats.size() )
  13462. {
  13463. surfaceFormats.resize( surfaceFormatCount );
  13464. }
  13465. return surfaceFormats;
  13466. }
  13467. template <typename StructureChain>
  13468. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain>
  13469. PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
  13470. {
  13471. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR &&
  13472. "Function <vkGetPhysicalDeviceSurfaceFormats2KHR> requires <VK_KHR_get_surface_capabilities2>" );
  13473. std::vector<StructureChain> structureChains;
  13474. std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
  13475. uint32_t surfaceFormatCount;
  13476. VkResult result;
  13477. do
  13478. {
  13479. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13480. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13481. &surfaceFormatCount,
  13482. nullptr );
  13483. if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
  13484. {
  13485. structureChains.resize( surfaceFormatCount );
  13486. surfaceFormats.resize( surfaceFormatCount );
  13487. for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  13488. {
  13489. surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
  13490. }
  13491. result = getDispatcher()->vkGetPhysicalDeviceSurfaceFormats2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13492. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  13493. &surfaceFormatCount,
  13494. reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
  13495. }
  13496. } while ( result == VK_INCOMPLETE );
  13497. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  13498. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  13499. if ( surfaceFormatCount < surfaceFormats.size() )
  13500. {
  13501. structureChains.resize( surfaceFormatCount );
  13502. }
  13503. for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  13504. {
  13505. structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
  13506. }
  13507. return structureChains;
  13508. }
  13509. //=== VK_KHR_get_display_properties2 ===
  13510. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> PhysicalDevice::getDisplayProperties2KHR() const
  13511. {
  13512. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR &&
  13513. "Function <vkGetPhysicalDeviceDisplayProperties2KHR> requires <VK_KHR_get_display_properties2>" );
  13514. std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR> properties;
  13515. uint32_t propertyCount;
  13516. VkResult result;
  13517. do
  13518. {
  13519. result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
  13520. if ( ( result == VK_SUCCESS ) && propertyCount )
  13521. {
  13522. properties.resize( propertyCount );
  13523. result = getDispatcher()->vkGetPhysicalDeviceDisplayProperties2KHR(
  13524. static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
  13525. }
  13526. } while ( result == VK_INCOMPLETE );
  13527. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  13528. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  13529. if ( propertyCount < properties.size() )
  13530. {
  13531. properties.resize( propertyCount );
  13532. }
  13533. return properties;
  13534. }
  13535. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> PhysicalDevice::getDisplayPlaneProperties2KHR() const
  13536. {
  13537. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR &&
  13538. "Function <vkGetPhysicalDeviceDisplayPlaneProperties2KHR> requires <VK_KHR_get_display_properties2>" );
  13539. std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR> properties;
  13540. uint32_t propertyCount;
  13541. VkResult result;
  13542. do
  13543. {
  13544. result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
  13545. if ( ( result == VK_SUCCESS ) && propertyCount )
  13546. {
  13547. properties.resize( propertyCount );
  13548. result = getDispatcher()->vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
  13549. static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
  13550. }
  13551. } while ( result == VK_INCOMPLETE );
  13552. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  13553. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  13554. if ( propertyCount < properties.size() )
  13555. {
  13556. properties.resize( propertyCount );
  13557. }
  13558. return properties;
  13559. }
  13560. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> DisplayKHR::getModeProperties2() const
  13561. {
  13562. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayModeProperties2KHR &&
  13563. "Function <vkGetDisplayModeProperties2KHR> requires <VK_KHR_get_display_properties2>" );
  13564. std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR> properties;
  13565. uint32_t propertyCount;
  13566. VkResult result;
  13567. do
  13568. {
  13569. result = getDispatcher()->vkGetDisplayModeProperties2KHR(
  13570. static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ), &propertyCount, nullptr );
  13571. if ( ( result == VK_SUCCESS ) && propertyCount )
  13572. {
  13573. properties.resize( propertyCount );
  13574. result = getDispatcher()->vkGetDisplayModeProperties2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13575. static_cast<VkDisplayKHR>( m_display ),
  13576. &propertyCount,
  13577. reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
  13578. }
  13579. } while ( result == VK_INCOMPLETE );
  13580. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::getModeProperties2" );
  13581. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  13582. if ( propertyCount < properties.size() )
  13583. {
  13584. properties.resize( propertyCount );
  13585. }
  13586. return properties;
  13587. }
  13588. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR
  13589. PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo ) const
  13590. {
  13591. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDisplayPlaneCapabilities2KHR &&
  13592. "Function <vkGetDisplayPlaneCapabilities2KHR> requires <VK_KHR_get_display_properties2>" );
  13593. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
  13594. VkResult result = getDispatcher()->vkGetDisplayPlaneCapabilities2KHR( static_cast<VkPhysicalDevice>( m_physicalDevice ),
  13595. reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
  13596. reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
  13597. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
  13598. return capabilities;
  13599. }
  13600. # if defined( VK_USE_PLATFORM_IOS_MVK )
  13601. //=== VK_MVK_ios_surface ===
  13602. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  13603. Instance::createIOSSurfaceMVK( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const & createInfo,
  13604. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13605. {
  13606. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  13607. }
  13608. # endif /*VK_USE_PLATFORM_IOS_MVK*/
  13609. # if defined( VK_USE_PLATFORM_MACOS_MVK )
  13610. //=== VK_MVK_macos_surface ===
  13611. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  13612. Instance::createMacOSSurfaceMVK( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const & createInfo,
  13613. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  13614. {
  13615. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  13616. }
  13617. # endif /*VK_USE_PLATFORM_MACOS_MVK*/
  13618. //=== VK_EXT_debug_utils ===
  13619. VULKAN_HPP_INLINE void Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo ) const
  13620. {
  13621. VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectNameEXT && "Function <vkSetDebugUtilsObjectNameEXT> requires <VK_EXT_debug_utils>" );
  13622. VkResult result = getDispatcher()->vkSetDebugUtilsObjectNameEXT( static_cast<VkDevice>( m_device ),
  13623. reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
}
VULKAN_HPP_INLINE void Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSetDebugUtilsObjectTagEXT && "Function <vkSetDebugUtilsObjectTagEXT> requires <VK_EXT_debug_utils>" );
VkResult result =
getDispatcher()->vkSetDebugUtilsObjectTagEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
}
VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueBeginDebugUtilsLabelEXT && "Function <vkQueueBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueBeginDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueEndDebugUtilsLabelEXT && "Function <vkQueueEndDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueEndDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ) );
}
VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkQueueInsertDebugUtilsLabelEXT && "Function <vkQueueInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkQueueInsertDebugUtilsLabelEXT( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBeginDebugUtilsLabelEXT && "Function <vkCmdBeginDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdBeginDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEndDebugUtilsLabelEXT && "Function <vkCmdEndDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdEndDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ) );
}
VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInsertDebugUtilsLabelEXT && "Function <vkCmdInsertDebugUtilsLabelEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkCmdInsertDebugUtilsLabelEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT
Instance::createDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void
Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkSubmitDebugUtilsMessageEXT && "Function <vkSubmitDebugUtilsMessageEXT> requires <VK_EXT_debug_utils>" );
getDispatcher()->vkSubmitDebugUtilsMessageEXT( static_cast<VkInstance>( m_instance ),
static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
}
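// Usage sketch (editorial note, not part of the generated header): how the VK_EXT_debug_utils
// wrappers above are typically driven, assuming the default vk:: / vk::raii:: namespaces.
// The handles `instance` (vk::raii::Instance), `commandBuffer` (vk::raii::CommandBuffer) and
// the pre-filled `messengerCreateInfo` are hypothetical names, not defined in this header.
//
//   vk::raii::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( messengerCreateInfo );
//   commandBuffer.beginDebugUtilsLabelEXT( vk::DebugUtilsLabelEXT{ "shadow pass" } );
//   // ... record commands belonging to the labelled region ...
//   commandBuffer.endDebugUtilsLabelEXT();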
# if defined( VK_USE_PLATFORM_ANDROID_KHR )
//=== VK_ANDROID_external_memory_android_hardware_buffer ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID
Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
"Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
return properties;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID &&
"Function <vkGetAndroidHardwareBufferPropertiesANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
VkResult result = getDispatcher()->vkGetAndroidHardwareBufferPropertiesANDROID(
static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE struct AHardwareBuffer *
Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID &&
"Function <vkGetMemoryAndroidHardwareBufferANDROID> requires <VK_ANDROID_external_memory_android_hardware_buffer>" );
struct AHardwareBuffer * buffer;
VkResult result = getDispatcher()->vkGetMemoryAndroidHardwareBufferANDROID(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
return buffer;
}
# endif /*VK_USE_PLATFORM_ANDROID_KHR*/
# if defined( VK_ENABLE_BETA_EXTENSIONS )
//=== VK_AMDX_shader_enqueue ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createExecutionGraphPipelinesAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createExecutionGraphPipelineAMDX(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX Pipeline::getExecutionGraphScratchSizeAMDX() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX &&
"Function <vkGetExecutionGraphPipelineScratchSizeAMDX> requires <VK_AMDX_shader_enqueue>" );
VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
VkResult result = getDispatcher()->vkGetExecutionGraphPipelineScratchSizeAMDX(
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphScratchSizeAMDX" );
return sizeInfo;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t
Pipeline::getExecutionGraphNodeIndexAMDX( const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX &&
"Function <vkGetExecutionGraphPipelineNodeIndexAMDX> requires <VK_AMDX_shader_enqueue>" );
uint32_t nodeIndex;
VkResult result =
getDispatcher()->vkGetExecutionGraphPipelineNodeIndexAMDX( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ),
&nodeIndex );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getExecutionGraphNodeIndexAMDX" );
return nodeIndex;
}
VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX &&
"Function <vkCmdInitializeGraphScratchMemoryAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdInitializeGraphScratchMemoryAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ) );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphAMDX && "Function <vkCmdDispatchGraphAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( scratch ),
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
VULKAN_HPP_INLINE void
CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectAMDX && "Function <vkCmdDispatchGraphIndirectAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphIndirectAMDX( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkDeviceAddress>( scratch ),
reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
}
VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
VULKAN_HPP_NAMESPACE::DeviceAddress countInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX &&
"Function <vkCmdDispatchGraphIndirectCountAMDX> requires <VK_AMDX_shader_enqueue>" );
getDispatcher()->vkCmdDispatchGraphIndirectCountAMDX(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
}
# endif /*VK_ENABLE_BETA_EXTENSIONS*/
//=== VK_EXT_sample_locations ===
VULKAN_HPP_INLINE void
CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEXT && "Function <vkCmdSetSampleLocationsEXT> requires <VK_EXT_sample_locations>" );
getDispatcher()->vkCmdSetSampleLocationsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT &&
"Function <vkGetPhysicalDeviceMultisamplePropertiesEXT> requires <VK_EXT_sample_locations>" );
VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
getDispatcher()->vkGetPhysicalDeviceMultisamplePropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
static_cast<VkSampleCountFlagBits>( samples ),
reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
return multisampleProperties;
}
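// Usage sketch (editorial, hypothetical names): query the supported sample-location grid
// for a sample count before programming custom locations while recording. `physicalDevice`
// and `commandBuffer` are assumed vk::raii handles.
//
//   vk::MultisamplePropertiesEXT props =
//     physicalDevice.getMultisamplePropertiesEXT( vk::SampleCountFlagBits::e4 );
//   // props.maxSampleLocationGridSize bounds the vk::SampleLocationsInfoEXT that is later
//   // handed to commandBuffer.setSampleLocationsEXT() while the pipeline is bound.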
//=== VK_KHR_get_memory_requirements2 ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
"Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageMemoryRequirements2KHR &&
"Function <vkGetImageMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetImageMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
"Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return memoryRequirements;
}
template <typename X, typename Y, typename... Z>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferMemoryRequirements2KHR &&
"Function <vkGetBufferMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
getDispatcher()->vkGetBufferMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ),
reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
return structureChain;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageSparseMemoryRequirements2KHR &&
"Function <vkGetImageSparseMemoryRequirements2KHR> requires <VK_KHR_get_memory_requirements2> or <VK_VERSION_1_1>" );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
getDispatcher()->vkGetImageSparseMemoryRequirements2KHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
getDispatcher()->vkGetImageSparseMemoryRequirements2KHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
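// Usage sketch (editorial, hypothetical names): the templated overloads above return a
// StructureChain, so extension structs can be queried in the same call. `device` is an
// assumed vk::raii::Device and `image` an assumed vk::raii::Image.
//
//   auto chain = device.getImageMemoryRequirements2KHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
//     vk::ImageMemoryRequirementsInfo2{ *image } );
//   vk::MemoryRequirements2 reqs      = chain.get<vk::MemoryRequirements2>();
//   bool prefersDedicated             = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;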
//=== VK_KHR_acceleration_structure ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR
Device::createAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresKHR &&
"Function <vkCmdBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
if ( infos.size() != pBuildRangeInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
getDispatcher()->vkCmdBuildAccelerationStructuresKHR(
static_cast<VkCommandBuffer>( m_commandBuffer ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR &&
"Function <vkCmdBuildAccelerationStructuresIndirectKHR> requires <VK_KHR_acceleration_structure>" );
if ( infos.size() != indirectDeviceAddresses.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING
"::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
}
if ( infos.size() != indirectStrides.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
}
if ( infos.size() != pMaxPrimitiveCounts.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
}
getDispatcher()->vkCmdBuildAccelerationStructuresIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
indirectStrides.data(),
pMaxPrimitiveCounts.data() );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBuildAccelerationStructuresKHR &&
"Function <vkBuildAccelerationStructuresKHR> requires <VK_KHR_acceleration_structure>" );
if ( infos.size() != pBuildRangeInfos.size() )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
}
VkResult result = getDispatcher()->vkBuildAccelerationStructuresKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
infos.size(),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureKHR &&
"Function <vkCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
VkResult result = getDispatcher()->vkCopyAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyAccelerationStructureToMemoryKHR &&
"Function <vkCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
VkResult result =
getDispatcher()->vkCopyAccelerationStructureToMemoryKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToAccelerationStructureKHR &&
"Function <vkCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
VkResult result =
getDispatcher()->vkCopyMemoryToAccelerationStructureKHR( static_cast<VkDevice>( m_device ),
static_cast<VkDeferredOperationKHR>( deferredOperation ),
reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess,
VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> Device::writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t dataSize,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
"Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VkResult result =
getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ),
stride );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::writeAccelerationStructuresPropertyKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
size_t stride ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR &&
"Function <vkWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
DataType data;
VkResult result =
getDispatcher()->vkWriteAccelerationStructuresPropertiesKHR( static_cast<VkDevice>( m_device ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
sizeof( DataType ),
reinterpret_cast<void *>( &data ),
stride );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
return data;
}
VULKAN_HPP_INLINE void
CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureKHR &&
"Function <vkCmdCopyAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR(
const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR &&
"Function <vkCmdCopyAccelerationStructureToMemoryKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyAccelerationStructureToMemoryKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
}
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR(
const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR &&
"Function <vkCmdCopyMemoryToAccelerationStructureKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdCopyMemoryToAccelerationStructureKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR &&
"Function <vkGetAccelerationStructureDeviceAddressKHR> requires <VK_KHR_acceleration_structure>" );
VkDeviceAddress result = getDispatcher()->vkGetAccelerationStructureDeviceAddressKHR(
static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
}
VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
VULKAN_HPP_NAMESPACE::QueryType queryType,
VULKAN_HPP_NAMESPACE::QueryPool queryPool,
uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR &&
"Function <vkCmdWriteAccelerationStructuresPropertiesKHR> requires <VK_KHR_acceleration_structure>" );
getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
accelerationStructures.size(),
reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
static_cast<VkQueryType>( queryType ),
static_cast<VkQueryPool>( queryPool ),
firstQuery );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR(
const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR &&
"Function <vkGetDeviceAccelerationStructureCompatibilityKHR> requires <VK_KHR_acceleration_structure>" );
VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
getDispatcher()->vkGetDeviceAccelerationStructureCompatibilityKHR( static_cast<VkDevice>( m_device ),
reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
return compatibility;
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureBuildSizesKHR &&
"Function <vkGetAccelerationStructureBuildSizesKHR> requires <VK_KHR_acceleration_structure>" );
if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
}
VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
getDispatcher()->vkGetAccelerationStructureBuildSizesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
maxPrimitiveCounts.data(),
reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
return sizeInfo;
}
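// Usage sketch (editorial, hypothetical names): size an acceleration-structure build before
// allocating its backing and scratch buffers. `device` is an assumed vk::raii::Device,
// `buildInfo` an assumed, already-populated vk::AccelerationStructureBuildGeometryInfoKHR
// with exactly one geometry, and `triangleCount` its primitive count.
//
//   vk::AccelerationStructureBuildSizesInfoKHR sizes = device.getAccelerationStructureBuildSizesKHR(
//     vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, triangleCount );
//   // sizes.accelerationStructureSize and sizes.buildScratchSize drive the buffer allocations
//   // that back createAccelerationStructureKHR() and the scratch address used at build time.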
//=== VK_KHR_ray_tracing_pipeline ===
VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
uint32_t width,
uint32_t height,
uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysKHR && "Function <vkCmdTraceRaysKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdTraceRaysKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
width,
height,
depth );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, deferredOperation, pipelineCache, createInfos, allocator );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineKHR(
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR> const & deferredOperation,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, deferredOperation, pipelineCache, createInfo, allocator );
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Pipeline::getRayTracingShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
"Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR &&
"Function <vkGetRayTracingShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
DataType data;
VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
"Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
std::vector<DataType> data( dataSize / sizeof( DataType ) );
VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
data.size() * sizeof( DataType ),
reinterpret_cast<void *>( data.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
return data;
}
template <typename DataType>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR( uint32_t firstGroup, uint32_t groupCount ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR &&
"Function <vkGetRayTracingCaptureReplayShaderGroupHandlesKHR> requires <VK_KHR_ray_tracing_pipeline>" );
DataType data;
VkResult result = getDispatcher()->vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( static_cast<VkDevice>( m_device ),
static_cast<VkPipeline>( m_pipeline ),
firstGroup,
groupCount,
sizeof( DataType ),
reinterpret_cast<void *>( &data ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingCaptureReplayShaderGroupHandleKHR" );
return data;
}
VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirectKHR && "Function <vkCmdTraceRaysIndirectKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdTraceRaysIndirectKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
Pipeline::getRayTracingShaderGroupStackSizeKHR( uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR &&
"Function <vkGetRayTracingShaderGroupStackSizeKHR> requires <VK_KHR_ray_tracing_pipeline>" );
VkDeviceSize result = getDispatcher()->vkGetRayTracingShaderGroupStackSizeKHR(
static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) );
return static_cast<VULKAN_HPP_NAMESPACE::DeviceSize>( result );
}
VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR &&
"Function <vkCmdSetRayTracingPipelineStackSizeKHR> requires <VK_KHR_ray_tracing_pipeline>" );
getDispatcher()->vkCmdSetRayTracingPipelineStackSizeKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), pipelineStackSize );
}
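// Usage sketch (editorial, hypothetical names): pulling shader-group handles to build a
// shader binding table. `rtProps` is an assumed vk::PhysicalDeviceRayTracingPipelinePropertiesKHR,
// `pipeline` an assumed vk::raii::Pipeline and `groupCount` the number of shader groups.
//
//   size_t handleSize = rtProps.shaderGroupHandleSize;
//   std::vector<uint8_t> handles =
//     pipeline.getRayTracingShaderGroupHandlesKHR<uint8_t>( 0, groupCount, groupCount * handleSize );
//   // Each handle is then copied into the SBT buffer at an offset aligned to
//   // rtProps.shaderGroupHandleAlignment before recording traceRaysKHR().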
//=== VK_KHR_sampler_ycbcr_conversion ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion
Device::createSamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void
Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkDestroySamplerYcbcrConversionKHR &&
"Function <vkDestroySamplerYcbcrConversionKHR> requires <VK_KHR_sampler_ycbcr_conversion> or <VK_VERSION_1_1>" );
getDispatcher()->vkDestroySamplerYcbcrConversionKHR(
static_cast<VkDevice>( m_device ),
static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
//=== VK_KHR_bind_memory2 ===
VULKAN_HPP_INLINE void
Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindBufferMemory2KHR && "Function <vkBindBufferMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VkResult result = getDispatcher()->vkBindBufferMemory2KHR(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
}
VULKAN_HPP_INLINE void
Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkBindImageMemory2KHR && "Function <vkBindImageMemory2KHR> requires <VK_KHR_bind_memory2> or <VK_VERSION_1_1>" );
VkResult result = getDispatcher()->vkBindImageMemory2KHR(
static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
}
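// Usage sketch (editorial, hypothetical names): the bind*Memory2KHR wrappers take an
// ArrayProxy, so several bindings can be batched into one call; failures surface as
// exceptions through resultCheck(). `device`, `buffer`, `memory` and `offset` are assumed.
//
//   std::array<vk::BindBufferMemoryInfo, 1> binds = { vk::BindBufferMemoryInfo{ *buffer, *memory, offset } };
//   device.bindBufferMemory2KHR( binds );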
//=== VK_EXT_image_drm_format_modifier ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT Image::getDrmFormatModifierPropertiesEXT() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT &&
"Function <vkGetImageDrmFormatModifierPropertiesEXT> requires <VK_EXT_image_drm_format_modifier>" );
VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
VkResult result = getDispatcher()->vkGetImageDrmFormatModifierPropertiesEXT(
static_cast<VkDevice>( m_device ), static_cast<VkImage>( m_image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Image::getDrmFormatModifierPropertiesEXT" );
return properties;
}
//=== VK_EXT_validation_cache ===
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT
Device::createValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const & createInfo,
VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
{
return VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT( *this, createInfo, allocator );
}
VULKAN_HPP_INLINE void ValidationCacheEXT::merge( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches ) const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkMergeValidationCachesEXT && "Function <vkMergeValidationCachesEXT> requires <VK_EXT_validation_cache>" );
VkResult result = getDispatcher()->vkMergeValidationCachesEXT( static_cast<VkDevice>( m_device ),
static_cast<VkValidationCacheEXT>( m_validationCache ),
srcCaches.size(),
reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::merge" );
}
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ValidationCacheEXT::getData() const
{
VULKAN_HPP_ASSERT( getDispatcher()->vkGetValidationCacheDataEXT && "Function <vkGetValidationCacheDataEXT> requires <VK_EXT_validation_cache>" );
std::vector<uint8_t> data;
size_t dataSize;
VkResult result;
do
{
result = getDispatcher()->vkGetValidationCacheDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, nullptr );
if ( ( result == VK_SUCCESS ) && dataSize )
{
data.resize( dataSize );
result = getDispatcher()->vkGetValidationCacheDataEXT(
static_cast<VkDevice>( m_device ), static_cast<VkValidationCacheEXT>( m_validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ValidationCacheEXT::getData" );
VULKAN_HPP_ASSERT( dataSize <= data.size() );
if ( dataSize < data.size() )
{
data.resize( dataSize );
}
return data;
}
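// Note (editorial): getData() above wraps the usual two-call enumeration pattern — query the
// size, resize, fetch, and retry while the driver reports VK_INCOMPLETE — so callers simply
// receive a right-sized std::vector<uint8_t>. A blob retrieved this way can be fed back into a
// vk::ValidationCacheCreateInfoEXT on a later run, and several caches can be folded together
// with ValidationCacheEXT::merge().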
//=== VK_NV_shading_rate_image ===
VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadingRateImageNV && "Function <vkCmdBindShadingRateImageNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdBindShadingRateImageNV(
static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
uint32_t firstViewport,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportShadingRatePaletteNV &&
"Function <vkCmdSetViewportShadingRatePaletteNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdSetViewportShadingRatePaletteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
firstViewport,
shadingRatePalettes.size(),
reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
}
VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV(
VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoarseSampleOrderNV && "Function <vkCmdSetCoarseSampleOrderNV> requires <VK_NV_shading_rate_image>" );
getDispatcher()->vkCmdSetCoarseSampleOrderNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
customSampleOrders.size(),
reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
}
  14373. //=== VK_NV_ray_tracing ===
  14374. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV
  14375. Device::createAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const & createInfo,
  14376. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  14377. {
  14378. return VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV( *this, createInfo, allocator );
  14379. }
  14380. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV(
  14381. const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
  14382. {
  14383. VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
  14384. "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
  14385. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
  14386. getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  14387. reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
  14388. reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  14389. return memoryRequirements;
  14390. }
  14391. template <typename X, typename Y, typename... Z>
  14392. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV(
  14393. const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
  14394. {
  14395. VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV &&
  14396. "Function <vkGetAccelerationStructureMemoryRequirementsNV> requires <VK_NV_ray_tracing>" );
  14397. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  14398. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
  14399. getDispatcher()->vkGetAccelerationStructureMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  14400. reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
  14401. reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  14402. return structureChain;
  14403. }
  14404. VULKAN_HPP_INLINE void Device::bindAccelerationStructureMemoryNV(
  14405. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos ) const
  14406. {
  14407. VULKAN_HPP_ASSERT( getDispatcher()->vkBindAccelerationStructureMemoryNV &&
  14408. "Function <vkBindAccelerationStructureMemoryNV> requires <VK_NV_ray_tracing>" );
  14409. VkResult result = getDispatcher()->vkBindAccelerationStructureMemoryNV(
  14410. static_cast<VkDevice>( m_device ), bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
  14411. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
  14412. }
    VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
      VULKAN_HPP_NAMESPACE::Buffer instanceData,
      VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
      VULKAN_HPP_NAMESPACE::Bool32 update,
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
      VULKAN_HPP_NAMESPACE::Buffer scratch,
      VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildAccelerationStructureNV && "Function <vkCmdBuildAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
      getDispatcher()->vkCmdBuildAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
        static_cast<VkBuffer>( instanceData ),
        static_cast<VkDeviceSize>( instanceOffset ),
        static_cast<VkBool32>( update ),
        static_cast<VkAccelerationStructureNV>( dst ),
        static_cast<VkAccelerationStructureNV>( src ),
        static_cast<VkBuffer>( scratch ),
        static_cast<VkDeviceSize>( scratchOffset ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
      VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
      VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyAccelerationStructureNV && "Function <vkCmdCopyAccelerationStructureNV> requires <VK_NV_ray_tracing>" );
      getDispatcher()->vkCmdCopyAccelerationStructureNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkAccelerationStructureNV>( dst ),
        static_cast<VkAccelerationStructureNV>( src ),
        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
      VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
      VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
      VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
      VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
      VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
      VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
      uint32_t width,
      uint32_t height,
      uint32_t depth ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysNV && "Function <vkCmdTraceRaysNV> requires <VK_NV_ray_tracing>" );
      getDispatcher()->vkCmdTraceRaysNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
        static_cast<VkDeviceSize>( missShaderBindingOffset ),
        static_cast<VkDeviceSize>( missShaderBindingStride ),
        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
        static_cast<VkDeviceSize>( hitShaderBindingStride ),
        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
        static_cast<VkDeviceSize>( callableShaderBindingStride ),
        width,
        height,
        depth );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::Pipeline> Device::createRayTracingPipelinesNV(
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
      VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::Pipelines( *this, pipelineCache, createInfos, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::Pipeline Device::createRayTracingPipelineNV(
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::VULKAN_HPP_RAII_NAMESPACE::PipelineCache> const & pipelineCache,
      VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const & createInfo,
      VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::Pipeline( *this, pipelineCache, createInfo, allocator );
    }

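    // The accessors below are templated on DataType; the element count is derived from the byte size and
    // sizeof( DataType ). A minimal usage sketch (an assumption for illustration: a valid
    // VULKAN_HPP_RAII_NAMESPACE::Pipeline named rtPipeline with two shader groups, and handleSize taken from
    // the physical device's ray-tracing properties):
    //   std::vector<uint8_t> handles = rtPipeline.getRayTracingShaderGroupHandlesNV<uint8_t>( 0, 2, 2 * handleSize );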
    template <typename DataType>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
      Pipeline::getRayTracingShaderGroupHandlesNV( uint32_t firstGroup, uint32_t groupCount, size_t dataSize ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
        "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
      std::vector<DataType> data( dataSize / sizeof( DataType ) );
      VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
        static_cast<VkPipeline>( m_pipeline ),
        firstGroup,
        groupCount,
        data.size() * sizeof( DataType ),
        reinterpret_cast<void *>( data.data() ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandlesNV" );
      return data;
    }

    template <typename DataType>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Pipeline::getRayTracingShaderGroupHandleNV( uint32_t firstGroup, uint32_t groupCount ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRayTracingShaderGroupHandlesNV &&
        "Function <vkGetRayTracingShaderGroupHandlesNV> requires <VK_KHR_ray_tracing_pipeline> or <VK_NV_ray_tracing>" );
      DataType data;
      VkResult result = getDispatcher()->vkGetRayTracingShaderGroupHandlesNV( static_cast<VkDevice>( m_device ),
        static_cast<VkPipeline>( m_pipeline ),
        firstGroup,
        groupCount,
        sizeof( DataType ),
        reinterpret_cast<void *>( &data ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::getRayTracingShaderGroupHandleNV" );
      return data;
    }

    template <typename DataType>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType> AccelerationStructureNV::getHandle( size_t dataSize ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
      VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
      std::vector<DataType> data( dataSize / sizeof( DataType ) );
      VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
        static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
        data.size() * sizeof( DataType ),
        reinterpret_cast<void *>( data.data() ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
      return data;
    }

    template <typename DataType>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType AccelerationStructureNV::getHandle() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureHandleNV && "Function <vkGetAccelerationStructureHandleNV> requires <VK_NV_ray_tracing>" );
      DataType data;
      VkResult result = getDispatcher()->vkGetAccelerationStructureHandleNV( static_cast<VkDevice>( m_device ),
        static_cast<VkAccelerationStructureNV>( m_accelerationStructure ),
        sizeof( DataType ),
        reinterpret_cast<void *>( &data ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureNV::getHandle" );
      return data;
    }

    VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
      VULKAN_HPP_NAMESPACE::QueryType queryType,
      VULKAN_HPP_NAMESPACE::QueryPool queryPool,
      uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV &&
        "Function <vkCmdWriteAccelerationStructuresPropertiesNV> requires <VK_NV_ray_tracing>" );
      getDispatcher()->vkCmdWriteAccelerationStructuresPropertiesNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        accelerationStructures.size(),
        reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
        static_cast<VkQueryType>( queryType ),
        static_cast<VkQueryPool>( queryPool ),
        firstQuery );
    }

    VULKAN_HPP_INLINE void Pipeline::compileDeferredNV( uint32_t shader ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCompileDeferredNV && "Function <vkCompileDeferredNV> requires <VK_NV_ray_tracing>" );
      VkResult result = getDispatcher()->vkCompileDeferredNV( static_cast<VkDevice>( m_device ), static_cast<VkPipeline>( m_pipeline ), shader );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Pipeline::compileDeferredNV" );
    }

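    // Note: many of the wrappers that follow carry a KHR/EXT suffix but dispatch to entry points that are
    // also available through a core Vulkan version, as their assertion messages spell out
    // (e.g. <VK_KHR_maintenance3> or <VK_VERSION_1_1>).
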
    //=== VK_KHR_maintenance3 ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
      Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
        "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
      getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
        reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
      return support;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSupportKHR &&
        "Function <vkGetDescriptorSetLayoutSupportKHR> requires <VK_KHR_maintenance3> or <VK_VERSION_1_1>" );
      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
      getDispatcher()->vkGetDescriptorSetLayoutSupportKHR( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
        reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
      return structureChain;
    }

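    // A minimal sketch of the StructureChain overload above (an assumption for illustration: a filled
    // DescriptorSetLayoutCreateInfo named createInfo and a valid VULKAN_HPP_RAII_NAMESPACE::Device named device):
    //   auto chain   = device.getDescriptorSetLayoutSupportKHR<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport,
    //                                                          VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>( createInfo );
    //   auto support = chain.get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
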
    //=== VK_KHR_draw_indirect_count ===

    VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
      uint32_t maxDrawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawIndirectCountKHR &&
        "Function <vkCmdDrawIndirectCountKHR> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
      getDispatcher()->vkCmdDrawIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( buffer ),
        static_cast<VkDeviceSize>( offset ),
        static_cast<VkBuffer>( countBuffer ),
        static_cast<VkDeviceSize>( countBufferOffset ),
        maxDrawCount,
        stride );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
      uint32_t maxDrawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT(
        getDispatcher()->vkCmdDrawIndexedIndirectCountKHR &&
        "Function <vkCmdDrawIndexedIndirectCountKHR> requires <VK_AMD_draw_indirect_count> or <VK_KHR_draw_indirect_count> or <VK_VERSION_1_2>" );
      getDispatcher()->vkCmdDrawIndexedIndirectCountKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( buffer ),
        static_cast<VkDeviceSize>( offset ),
        static_cast<VkBuffer>( countBuffer ),
        static_cast<VkDeviceSize>( countBufferOffset ),
        maxDrawCount,
        stride );
    }

    //=== VK_EXT_external_memory_host ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT
      Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryHostPointerPropertiesEXT &&
        "Function <vkGetMemoryHostPointerPropertiesEXT> requires <VK_EXT_external_memory_host>" );
      VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
      VkResult result =
        getDispatcher()->vkGetMemoryHostPointerPropertiesEXT( static_cast<VkDevice>( m_device ),
          static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
          pHostPointer,
          reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
      return memoryHostPointerProperties;
    }

    //=== VK_AMD_buffer_marker ===

    VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
      VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
      uint32_t marker ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarkerAMD && "Function <vkCmdWriteBufferMarkerAMD> requires <VK_AMD_buffer_marker>" );
      getDispatcher()->vkCmdWriteBufferMarkerAMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkPipelineStageFlagBits>( pipelineStage ),
        static_cast<VkBuffer>( dstBuffer ),
        static_cast<VkDeviceSize>( dstOffset ),
        marker );
    }

    //=== VK_EXT_calibrated_timestamps ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> PhysicalDevice::getCalibrateableTimeDomainsEXT() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT &&
        "Function <vkGetPhysicalDeviceCalibrateableTimeDomainsEXT> requires <VK_EXT_calibrated_timestamps>" );
      std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT> timeDomains;
      uint32_t timeDomainCount;
      VkResult result;
      do
      {
        result =
          getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, nullptr );
        if ( ( result == VK_SUCCESS ) && timeDomainCount )
        {
          timeDomains.resize( timeDomainCount );
          result = getDispatcher()->vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
            static_cast<VkPhysicalDevice>( m_physicalDevice ), &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
      VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
      if ( timeDomainCount < timeDomains.size() )
      {
        timeDomains.resize( timeDomainCount );
      }
      return timeDomains;
    }

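    // Note: enumeration wrappers like the one above use the usual two-call pattern: query the count, size the
    // vector, fetch the data, and retry while the driver reports VK_INCOMPLETE; the final resize trims the
    // vector if fewer elements were written than were allocated.
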
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<std::vector<uint64_t>, uint64_t> Device::getCalibratedTimestampsEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps>" );
      std::pair<std::vector<uint64_t>, uint64_t> data_( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
      std::vector<uint64_t> & timestamps = data_.first;
      uint64_t & maxDeviation = data_.second;
      VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT( static_cast<VkDevice>( m_device ),
        timestampInfos.size(),
        reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ),
        timestamps.data(),
        &maxDeviation );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
      return data_;
    }

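    // A minimal usage sketch for the wrapper above (an assumption for illustration: a valid
    // VULKAN_HPP_RAII_NAMESPACE::Device named device):
    //   VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT info( VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice );
    //   auto [timestamps, maxDeviation] = device.getCalibratedTimestampsEXT( info );
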
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint64_t, uint64_t>
      Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetCalibratedTimestampsEXT && "Function <vkGetCalibratedTimestampsEXT> requires <VK_EXT_calibrated_timestamps>" );
      std::pair<uint64_t, uint64_t> data_;
      uint64_t & timestamp = data_.first;
      uint64_t & maxDeviation = data_.second;
      VkResult result = getDispatcher()->vkGetCalibratedTimestampsEXT(
        static_cast<VkDevice>( m_device ), 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
      return data_;
    }

    //=== VK_NV_mesh_shader ===

    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksNV && "Function <vkCmdDrawMeshTasksNV> requires <VK_NV_mesh_shader>" );
      getDispatcher()->vkCmdDrawMeshTasksNV( static_cast<VkCommandBuffer>( m_commandBuffer ), taskCount, firstTask );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      uint32_t drawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectNV && "Function <vkCmdDrawMeshTasksIndirectNV> requires <VK_NV_mesh_shader>" );
      getDispatcher()->vkCmdDrawMeshTasksIndirectNV(
        static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
    }

    VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
      VULKAN_HPP_NAMESPACE::DeviceSize offset,
      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
      uint32_t maxDrawCount,
      uint32_t stride ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV && "Function <vkCmdDrawMeshTasksIndirectCountNV> requires <VK_NV_mesh_shader>" );
      getDispatcher()->vkCmdDrawMeshTasksIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkBuffer>( buffer ),
        static_cast<VkDeviceSize>( offset ),
        static_cast<VkBuffer>( countBuffer ),
        static_cast<VkDeviceSize>( countBufferOffset ),
        maxDrawCount,
        stride );
    }

    //=== VK_NV_scissor_exclusive ===

    VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV(
      uint32_t firstExclusiveScissor,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorEnableNV &&
        "Function <vkCmdSetExclusiveScissorEnableNV> requires <VK_NV_scissor_exclusive>" );
      getDispatcher()->vkCmdSetExclusiveScissorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        firstExclusiveScissor,
        exclusiveScissorEnables.size(),
        reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV(
      uint32_t firstExclusiveScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExclusiveScissorNV && "Function <vkCmdSetExclusiveScissorNV> requires <VK_NV_scissor_exclusive>" );
      getDispatcher()->vkCmdSetExclusiveScissorNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
        firstExclusiveScissor,
        exclusiveScissors.size(),
        reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
    }

    //=== VK_NV_device_diagnostic_checkpoints ===

    template <typename CheckpointMarkerType>
    VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCheckpointNV && "Function <vkCmdSetCheckpointNV> requires <VK_NV_device_diagnostic_checkpoints>" );
      getDispatcher()->vkCmdSetCheckpointNV( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const void *>( &checkpointMarker ) );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> Queue::getCheckpointDataNV() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointDataNV &&
        "Function <vkGetQueueCheckpointDataNV> requires <VK_NV_device_diagnostic_checkpoints>" );
      std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV> checkpointData;
      uint32_t checkpointDataCount;
      getDispatcher()->vkGetQueueCheckpointDataNV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
      checkpointData.resize( checkpointDataCount );
      getDispatcher()->vkGetQueueCheckpointDataNV(
        static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
      VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
      if ( checkpointDataCount < checkpointData.size() )
      {
        checkpointData.resize( checkpointDataCount );
      }
      return checkpointData;
    }

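    // Note: vkGetQueueCheckpointDataNV returns void, so the wrapper above performs no VK_INCOMPLETE retry
    // loop and no resultCheck; it only trims the vector to the reported count.
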
    //=== VK_KHR_timeline_semaphore ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Semaphore::getCounterValueKHR() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreCounterValueKHR &&
        "Function <vkGetSemaphoreCounterValueKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
      uint64_t value;
      VkResult result = getDispatcher()->vkGetSemaphoreCounterValueKHR( static_cast<VkDevice>( m_device ), static_cast<VkSemaphore>( m_semaphore ), &value );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Semaphore::getCounterValueKHR" );
      return value;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo,
      uint64_t timeout ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkWaitSemaphoresKHR && "Function <vkWaitSemaphoresKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
      VkResult result =
        getDispatcher()->vkWaitSemaphoresKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
        VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
        { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
    }

    VULKAN_HPP_INLINE void Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkSignalSemaphoreKHR && "Function <vkSignalSemaphoreKHR> requires <VK_KHR_timeline_semaphore> or <VK_VERSION_1_2>" );
      VkResult result =
        getDispatcher()->vkSignalSemaphoreKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
    }

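    // A minimal usage sketch for the timeline-semaphore wrappers above (assumptions for illustration: a valid
    // VULKAN_HPP_RAII_NAMESPACE::Device named device, a VULKAN_HPP_NAMESPACE::Semaphore lvalue named semaphore,
    // a uint64_t waitValue, and a timeout in nanoseconds named timeoutNs):
    //   VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo waitInfo;
    //   waitInfo.semaphoreCount = 1;
    //   waitInfo.pSemaphores    = &semaphore;
    //   waitInfo.pValues        = &waitValue;
    //   if ( device.waitSemaphoresKHR( waitInfo, timeoutNs ) == VULKAN_HPP_NAMESPACE::Result::eTimeout ) { /* handle timeout */ }
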
    //=== VK_INTEL_performance_query ===

    VULKAN_HPP_INLINE void Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkInitializePerformanceApiINTEL &&
        "Function <vkInitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" );
      VkResult result = getDispatcher()->vkInitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
    }

    VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkUninitializePerformanceApiINTEL &&
        "Function <vkUninitializePerformanceApiINTEL> requires <VK_INTEL_performance_query>" );
      getDispatcher()->vkUninitializePerformanceApiINTEL( static_cast<VkDevice>( m_device ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceMarkerINTEL && "Function <vkCmdSetPerformanceMarkerINTEL> requires <VK_INTEL_performance_query>" );
      VkResult result = getDispatcher()->vkCmdSetPerformanceMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL &&
        "Function <vkCmdSetPerformanceStreamMarkerINTEL> requires <VK_INTEL_performance_query>" );
      VkResult result = getDispatcher()->vkCmdSetPerformanceStreamMarkerINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPerformanceOverrideINTEL &&
        "Function <vkCmdSetPerformanceOverrideINTEL> requires <VK_INTEL_performance_query>" );
      VkResult result = getDispatcher()->vkCmdSetPerformanceOverrideINTEL( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL
      Device::acquirePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const & acquireInfo ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL( *this, acquireInfo );
    }

    VULKAN_HPP_INLINE void Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSetPerformanceConfigurationINTEL &&
        "Function <vkQueueSetPerformanceConfigurationINTEL> requires <VK_INTEL_performance_query>" );
      VkResult result = getDispatcher()->vkQueueSetPerformanceConfigurationINTEL( static_cast<VkQueue>( m_queue ),
        static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PerformanceValueINTEL
      Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPerformanceParameterINTEL && "Function <vkGetPerformanceParameterINTEL> requires <VK_INTEL_performance_query>" );
      VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
      VkResult result = getDispatcher()->vkGetPerformanceParameterINTEL(
        static_cast<VkDevice>( m_device ), static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
      return value;
    }

    //=== VK_AMD_display_native_hdr ===

    VULKAN_HPP_INLINE void SwapchainKHR::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLocalDimmingAMD && "Function <vkSetLocalDimmingAMD> requires <VK_AMD_display_native_hdr>" );
      getDispatcher()->vkSetLocalDimmingAMD(
        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), static_cast<VkBool32>( localDimmingEnable ) );
    }

# if defined( VK_USE_PLATFORM_FUCHSIA )
    //=== VK_FUCHSIA_imagepipe_surface ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
      Instance::createImagePipeSurfaceFUCHSIA( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const & createInfo,
        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
    }
# endif /*VK_USE_PLATFORM_FUCHSIA*/

# if defined( VK_USE_PLATFORM_METAL_EXT )
    //=== VK_EXT_metal_surface ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
      Instance::createMetalSurfaceEXT( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const & createInfo,
        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
    }
# endif /*VK_USE_PLATFORM_METAL_EXT*/

    //=== VK_KHR_fragment_shading_rate ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>
      PhysicalDevice::getFragmentShadingRatesKHR() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR &&
        "Function <vkGetPhysicalDeviceFragmentShadingRatesKHR> requires <VK_KHR_fragment_shading_rate>" );
      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR> fragmentShadingRates;
      uint32_t fragmentShadingRateCount;
      VkResult result;
      do
      {
        result =
          getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &fragmentShadingRateCount, nullptr );
        if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
        {
          fragmentShadingRates.resize( fragmentShadingRateCount );
          result = getDispatcher()->vkGetPhysicalDeviceFragmentShadingRatesKHR(
            static_cast<VkPhysicalDevice>( m_physicalDevice ),
            &fragmentShadingRateCount,
            reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
      VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      if ( fragmentShadingRateCount < fragmentShadingRates.size() )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
      }
      return fragmentShadingRates;
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
        const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateKHR &&
        "Function <vkCmdSetFragmentShadingRateKHR> requires <VK_KHR_fragment_shading_rate>" );
      getDispatcher()->vkCmdSetFragmentShadingRateKHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
        reinterpret_cast<const VkExtent2D *>( &fragmentSize ),
        reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
    }

    //=== VK_EXT_buffer_device_address ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
      Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT(
        getDispatcher()->vkGetBufferDeviceAddressEXT &&
        "Function <vkGetBufferDeviceAddressEXT> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
      VkDeviceAddress result =
        getDispatcher()->vkGetBufferDeviceAddressEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
      return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
    }

    //=== VK_EXT_tooling_info ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> PhysicalDevice::getToolPropertiesEXT() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT &&
        "Function <vkGetPhysicalDeviceToolPropertiesEXT> requires <VK_EXT_tooling_info> or <VK_VERSION_1_3>" );
      std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties> toolProperties;
      uint32_t toolCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, nullptr );
        if ( ( result == VK_SUCCESS ) && toolCount )
        {
          toolProperties.resize( toolCount );
          result = getDispatcher()->vkGetPhysicalDeviceToolPropertiesEXT(
            static_cast<VkPhysicalDevice>( m_physicalDevice ), &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
      VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
      if ( toolCount < toolProperties.size() )
      {
        toolProperties.resize( toolCount );
      }
      return toolProperties;
    }

    //=== VK_KHR_present_wait ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result SwapchainKHR::waitForPresent( uint64_t presentId, uint64_t timeout ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkWaitForPresentKHR && "Function <vkWaitForPresentKHR> requires <VK_KHR_present_wait>" );
      VkResult result =
        getDispatcher()->vkWaitForPresentKHR( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), presentId, timeout );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
        VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::waitForPresent",
        { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
    }

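    // Note: eTimeout and eSuboptimalKHR are listed as accepted success codes in the resultCheck call above,
    // so waitForPresent returns them to the caller instead of throwing.
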
    //=== VK_NV_cooperative_matrix ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>
      PhysicalDevice::getCooperativeMatrixPropertiesNV() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV &&
        "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesNV> requires <VK_NV_cooperative_matrix>" );
      std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV> properties;
      uint32_t propertyCount;
      VkResult result;
      do
      {
        result =
          getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
        if ( ( result == VK_SUCCESS ) && propertyCount )
        {
          properties.resize( propertyCount );
          result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      if ( propertyCount < properties.size() )
      {
        properties.resize( propertyCount );
      }
      return properties;
    }

    //=== VK_NV_coverage_reduction_mode ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>
      PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV &&
        "Function <vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV> requires <VK_NV_coverage_reduction_mode>" );
      std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV> combinations;
      uint32_t combinationCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          static_cast<VkPhysicalDevice>( m_physicalDevice ), &combinationCount, nullptr );
        if ( ( result == VK_SUCCESS ) && combinationCount )
        {
          combinations.resize( combinationCount );
          result = getDispatcher()->vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
            static_cast<VkPhysicalDevice>( m_physicalDevice ),
            &combinationCount,
            reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
        VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
      VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
      if ( combinationCount < combinations.size() )
      {
        combinations.resize( combinationCount );
      }
      return combinations;
    }

# if defined( VK_USE_PLATFORM_WIN32_KHR )
    //=== VK_EXT_full_screen_exclusive ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR>
      PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT &&
        "Function <vkGetPhysicalDeviceSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
      std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR> presentModes;
      uint32_t presentModeCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
          &presentModeCount,
          nullptr );
        if ( ( result == VK_SUCCESS ) && presentModeCount )
        {
          presentModes.resize( presentModeCount );
          result = getDispatcher()->vkGetPhysicalDeviceSurfacePresentModes2EXT( static_cast<VkPhysicalDevice>( m_physicalDevice ),
            reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
            &presentModeCount,
            reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
      VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
      if ( presentModeCount < presentModes.size() )
      {
        presentModes.resize( presentModeCount );
      }
      return presentModes;
    }

    VULKAN_HPP_INLINE void SwapchainKHR::acquireFullScreenExclusiveModeEXT() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireFullScreenExclusiveModeEXT &&
        "Function <vkAcquireFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
      VkResult result = getDispatcher()->vkAcquireFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::acquireFullScreenExclusiveModeEXT" );
    }

    VULKAN_HPP_INLINE void SwapchainKHR::releaseFullScreenExclusiveModeEXT() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseFullScreenExclusiveModeEXT &&
        "Function <vkReleaseFullScreenExclusiveModeEXT> requires <VK_EXT_full_screen_exclusive>" );
      VkResult result = getDispatcher()->vkReleaseFullScreenExclusiveModeEXT( static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::releaseFullScreenExclusiveModeEXT" );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR
      Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT &&
        "Function <vkGetDeviceGroupSurfacePresentModes2EXT> requires <VK_EXT_full_screen_exclusive>" );
      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
      VkResult result = getDispatcher()->vkGetDeviceGroupSurfacePresentModes2EXT( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
        reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
      return modes;
    }
# endif /*VK_USE_PLATFORM_WIN32_KHR*/

    //=== VK_EXT_headless_surface ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
      Instance::createHeadlessSurfaceEXT( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const & createInfo,
        VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
    }

    //=== VK_KHR_buffer_device_address ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
      Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT(
        getDispatcher()->vkGetBufferDeviceAddressKHR &&
        "Function <vkGetBufferDeviceAddressKHR> requires <VK_EXT_buffer_device_address> or <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
      VkDeviceAddress result =
        getDispatcher()->vkGetBufferDeviceAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
      return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
      Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR &&
        "Function <vkGetBufferOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
      uint64_t result =
        getDispatcher()->vkGetBufferOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
      return result;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t
      Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR &&
        "Function <vkGetDeviceMemoryOpaqueCaptureAddressKHR> requires <VK_KHR_buffer_device_address> or <VK_VERSION_1_2>" );
      uint64_t result = getDispatcher()->vkGetDeviceMemoryOpaqueCaptureAddressKHR( static_cast<VkDevice>( m_device ),
        reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
      return result;
    }

    //=== VK_EXT_line_rasterization ===

    VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEXT && "Function <vkCmdSetLineStippleEXT> requires <VK_EXT_line_rasterization>" );
      getDispatcher()->vkCmdSetLineStippleEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), lineStippleFactor, lineStipplePattern );
    }

    //=== VK_EXT_host_query_reset ===

    VULKAN_HPP_INLINE void QueryPool::resetEXT( uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkResetQueryPoolEXT && "Function <vkResetQueryPoolEXT> requires <VK_EXT_host_query_reset> or <VK_VERSION_1_2>" );
      getDispatcher()->vkResetQueryPoolEXT( static_cast<VkDevice>( m_device ), static_cast<VkQueryPool>( m_queryPool ), firstQuery, queryCount );
    }

    //=== VK_EXT_extended_dynamic_state ===

    VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCullModeEXT &&
        "Function <vkCmdSetCullModeEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetCullModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCullModeFlags>( cullMode ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFrontFaceEXT &&
        "Function <vkCmdSetFrontFaceEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetFrontFaceEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkFrontFace>( frontFace ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPrimitiveTopologyEXT &&
        "Function <vkCmdSetPrimitiveTopologyEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetPrimitiveTopologyEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPrimitiveTopology>( primitiveTopology ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT(
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWithCountEXT &&
        "Function <vkCmdSetViewportWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetViewportWithCountEXT(
        static_cast<VkCommandBuffer>( m_commandBuffer ), viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetScissorWithCountEXT &&
        "Function <vkCmdSetScissorWithCountEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetScissorWithCountEXT(
        static_cast<VkCommandBuffer>( m_commandBuffer ), scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
        VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindVertexBuffers2EXT &&
        "Function <vkCmdBindVertexBuffers2EXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      if ( buffers.size() != offsets.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
      }
      if ( !sizes.empty() && buffers.size() != sizes.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
      }
      if ( !strides.empty() && buffers.size() != strides.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
      }
      getDispatcher()->vkCmdBindVertexBuffers2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
        firstBinding,
        buffers.size(),
        reinterpret_cast<const VkBuffer *>( buffers.data() ),
        reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
        reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
        reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
    }

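    // Note: bindVertexBuffers2EXT checks at runtime that the offsets span, and the optional sizes and strides
    // spans, match buffers.size(), throwing LogicError otherwise, because the C entry point expects parallel
    // arrays of equal length.
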
    VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthTestEnableEXT &&
        "Function <vkCmdSetDepthTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetDepthTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthTestEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthWriteEnableEXT &&
        "Function <vkCmdSetDepthWriteEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetDepthWriteEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthWriteEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthCompareOpEXT &&
        "Function <vkCmdSetDepthCompareOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetDepthCompareOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkCompareOp>( depthCompareOp ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT &&
        "Function <vkCmdSetDepthBoundsTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetDepthBoundsTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBoundsTestEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilTestEnableEXT &&
        "Function <vkCmdSetStencilTestEnableEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetStencilTestEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stencilTestEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
      VULKAN_HPP_NAMESPACE::StencilOp failOp,
      VULKAN_HPP_NAMESPACE::StencilOp passOp,
      VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
      VULKAN_HPP_NAMESPACE::CompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetStencilOpEXT &&
        "Function <vkCmdSetStencilOpEXT> requires <VK_EXT_extended_dynamic_state> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
      getDispatcher()->vkCmdSetStencilOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
        static_cast<VkStencilFaceFlags>( faceMask ),
        static_cast<VkStencilOp>( failOp ),
        static_cast<VkStencilOp>( passOp ),
        static_cast<VkStencilOp>( depthFailOp ),
        static_cast<VkCompareOp>( compareOp ) );
    }

    //=== VK_KHR_deferred_host_operations ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR
      Device::createDeferredOperationKHR( VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR( *this, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t DeferredOperationKHR::getMaxConcurrency() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR &&
        "Function <vkGetDeferredOperationMaxConcurrencyKHR> requires <VK_KHR_deferred_host_operations>" );
      uint32_t result =
        getDispatcher()->vkGetDeferredOperationMaxConcurrencyKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
      return result;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::getResult() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeferredOperationResultKHR &&
        "Function <vkGetDeferredOperationResultKHR> requires <VK_KHR_deferred_host_operations>" );
      VkResult result =
        getDispatcher()->vkGetDeferredOperationResultKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result DeferredOperationKHR::join() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkDeferredOperationJoinKHR && "Function <vkDeferredOperationJoinKHR> requires <VK_KHR_deferred_host_operations>" );
      VkResult result = getDispatcher()->vkDeferredOperationJoinKHR( static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( m_operation ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
        VULKAN_HPP_NAMESPACE_STRING "::DeferredOperationKHR::join",
        { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
      return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
    }

  15295. //=== VK_KHR_pipeline_executable_properties ===
  15296. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>
  15297. Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo ) const
  15298. {
  15299. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutablePropertiesKHR &&
  15300. "Function <vkGetPipelineExecutablePropertiesKHR> requires <VK_KHR_pipeline_executable_properties>" );
  15301. std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR> properties;
  15302. uint32_t executableCount;
  15303. VkResult result;
  15304. do
  15305. {
  15306. result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR(
  15307. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
  15308. if ( ( result == VK_SUCCESS ) && executableCount )
  15309. {
  15310. properties.resize( executableCount );
  15311. result = getDispatcher()->vkGetPipelineExecutablePropertiesKHR( static_cast<VkDevice>( m_device ),
  15312. reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
  15313. &executableCount,
  15314. reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
  15315. }
  15316. } while ( result == VK_INCOMPLETE );
  15317. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  15318. VULKAN_HPP_ASSERT( executableCount <= properties.size() );
  15319. if ( executableCount < properties.size() )
  15320. {
  15321. properties.resize( executableCount );
  15322. }
  15323. return properties;
  15324. }
  15325. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>
  15326. Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
  15327. {
  15328. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableStatisticsKHR &&
  15329. "Function <vkGetPipelineExecutableStatisticsKHR> requires <VK_KHR_pipeline_executable_properties>" );
  15330. std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR> statistics;
  15331. uint32_t statisticCount;
  15332. VkResult result;
  15333. do
  15334. {
  15335. result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR(
  15336. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
  15337. if ( ( result == VK_SUCCESS ) && statisticCount )
  15338. {
  15339. statistics.resize( statisticCount );
  15340. result = getDispatcher()->vkGetPipelineExecutableStatisticsKHR( static_cast<VkDevice>( m_device ),
  15341. reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
  15342. &statisticCount,
  15343. reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
  15344. }
  15345. } while ( result == VK_INCOMPLETE );
  15346. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  15347. VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
  15348. if ( statisticCount < statistics.size() )
  15349. {
  15350. statistics.resize( statisticCount );
  15351. }
  15352. return statistics;
  15353. }
  15354. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>
  15355. Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo ) const
  15356. {
  15357. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR &&
  15358. "Function <vkGetPipelineExecutableInternalRepresentationsKHR> requires <VK_KHR_pipeline_executable_properties>" );
  15359. std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR> internalRepresentations;
  15360. uint32_t internalRepresentationCount;
  15361. VkResult result;
  15362. do
  15363. {
  15364. result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
  15365. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
  15366. if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
  15367. {
  15368. internalRepresentations.resize( internalRepresentationCount );
  15369. result = getDispatcher()->vkGetPipelineExecutableInternalRepresentationsKHR(
  15370. static_cast<VkDevice>( m_device ),
  15371. reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
  15372. &internalRepresentationCount,
  15373. reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
  15374. }
  15375. } while ( result == VK_INCOMPLETE );
  15376. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15377. VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  15378. VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
  15379. if ( internalRepresentationCount < internalRepresentations.size() )
  15380. {
  15381. internalRepresentations.resize( internalRepresentationCount );
  15382. }
  15383. return internalRepresentations;
  15384. }
  15385. //=== VK_EXT_host_image_copy ===
  15386. VULKAN_HPP_INLINE void Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo ) const
  15387. {
  15388. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToImageEXT && "Function <vkCopyMemoryToImageEXT> requires <VK_EXT_host_image_copy>" );
  15389. VkResult result = getDispatcher()->vkCopyMemoryToImageEXT( static_cast<VkDevice>( m_device ),
  15390. reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
  15391. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
  15392. }
  15393. VULKAN_HPP_INLINE void Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo ) const
  15394. {
  15395. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToMemoryEXT && "Function <vkCopyImageToMemoryEXT> requires <VK_EXT_host_image_copy>" );
  15396. VkResult result = getDispatcher()->vkCopyImageToMemoryEXT( static_cast<VkDevice>( m_device ),
  15397. reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
  15398. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
  15399. }
  15400. VULKAN_HPP_INLINE void Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo ) const
  15401. {
  15402. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyImageToImageEXT && "Function <vkCopyImageToImageEXT> requires <VK_EXT_host_image_copy>" );
  15403. VkResult result = getDispatcher()->vkCopyImageToImageEXT( static_cast<VkDevice>( m_device ),
  15404. reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
  15405. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
  15406. }
  15407. VULKAN_HPP_INLINE void Device::transitionImageLayoutEXT(
  15408. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions ) const
  15409. {
  15410. VULKAN_HPP_ASSERT( getDispatcher()->vkTransitionImageLayoutEXT && "Function <vkTransitionImageLayoutEXT> requires <VK_EXT_host_image_copy>" );
  15411. VkResult result = getDispatcher()->vkTransitionImageLayoutEXT(
  15412. static_cast<VkDevice>( m_device ), transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
  15413. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
  15414. }
  15415. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
  15416. Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
  15417. {
  15418. VULKAN_HPP_ASSERT(
  15419. getDispatcher()->vkGetImageSubresourceLayout2EXT &&
  15420. "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
  15421. VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
  15422. getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
  15423. static_cast<VkImage>( m_image ),
  15424. reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
  15425. reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
  15426. return layout;
  15427. }
  15428. template <typename X, typename Y, typename... Z>
  15429. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  15430. Image::getSubresourceLayout2EXT( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
  15431. {
  15432. VULKAN_HPP_ASSERT(
  15433. getDispatcher()->vkGetImageSubresourceLayout2EXT &&
  15434. "Function <vkGetImageSubresourceLayout2EXT> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );
  15435. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  15436. VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
  15437. getDispatcher()->vkGetImageSubresourceLayout2EXT( static_cast<VkDevice>( m_device ),
  15438. static_cast<VkImage>( m_image ),
  15439. reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
  15440. reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
  15441. return structureChain;
  15442. }
  15443. //=== VK_KHR_map_memory2 ===
  15444. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo ) const
  15445. {
  15446. VULKAN_HPP_ASSERT( getDispatcher()->vkMapMemory2KHR && "Function <vkMapMemory2KHR> requires <VK_KHR_map_memory2>" );
  15447. void * pData;
  15448. VkResult result =
  15449. getDispatcher()->vkMapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData );
  15450. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );
  15451. return pData;
  15452. }
  15453. VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo ) const VULKAN_HPP_NOEXCEPT
  15454. {
  15455. VULKAN_HPP_ASSERT( getDispatcher()->vkUnmapMemory2KHR && "Function <vkUnmapMemory2KHR> requires <VK_KHR_map_memory2>" );
  15456. getDispatcher()->vkUnmapMemory2KHR( static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
  15457. }
  15458. //=== VK_EXT_swapchain_maintenance1 ===
  15459. VULKAN_HPP_INLINE void Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo ) const
  15460. {
  15461. VULKAN_HPP_ASSERT( getDispatcher()->vkReleaseSwapchainImagesEXT && "Function <vkReleaseSwapchainImagesEXT> requires <VK_EXT_swapchain_maintenance1>" );
  15462. VkResult result = getDispatcher()->vkReleaseSwapchainImagesEXT( static_cast<VkDevice>( m_device ),
  15463. reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
  15464. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
  15465. }
  15466. //=== VK_NV_device_generated_commands ===
  15467. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  15468. Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
  15469. {
  15470. VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
  15471. "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
  15472. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  15473. getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  15474. reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
  15475. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  15476. return memoryRequirements;
  15477. }
  15478. template <typename X, typename Y, typename... Z>
  15479. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  15480. Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info ) const VULKAN_HPP_NOEXCEPT
  15481. {
  15482. VULKAN_HPP_ASSERT( getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV &&
  15483. "Function <vkGetGeneratedCommandsMemoryRequirementsNV> requires <VK_NV_device_generated_commands>" );
  15484. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  15485. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  15486. getDispatcher()->vkGetGeneratedCommandsMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  15487. reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
  15488. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  15489. return structureChain;
  15490. }
  15491. VULKAN_HPP_INLINE void
  15492. CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
  15493. {
  15494. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPreprocessGeneratedCommandsNV &&
  15495. "Function <vkCmdPreprocessGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
  15496. getDispatcher()->vkCmdPreprocessGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15497. reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  15498. }
  15499. VULKAN_HPP_INLINE void
  15500. CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed,
  15501. const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
  15502. {
  15503. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdExecuteGeneratedCommandsNV &&
  15504. "Function <vkCmdExecuteGeneratedCommandsNV> requires <VK_NV_device_generated_commands>" );
  15505. getDispatcher()->vkCmdExecuteGeneratedCommandsNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15506. static_cast<VkBool32>( isPreprocessed ),
  15507. reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  15508. }
  15509. VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  15510. VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  15511. uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
  15512. {
  15513. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindPipelineShaderGroupNV &&
  15514. "Function <vkCmdBindPipelineShaderGroupNV> requires <VK_NV_device_generated_commands>" );
  15515. getDispatcher()->vkCmdBindPipelineShaderGroupNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15516. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  15517. static_cast<VkPipeline>( pipeline ),
  15518. groupIndex );
  15519. }
  15520. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV
  15521. Device::createIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const & createInfo,
  15522. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  15523. {
  15524. return VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV( *this, createInfo, allocator );
  15525. }
  15526. //=== VK_EXT_depth_bias_control ===
  15527. VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo ) const VULKAN_HPP_NOEXCEPT
  15528. {
  15529. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBias2EXT && "Function <vkCmdSetDepthBias2EXT> requires <VK_EXT_depth_bias_control>" );
  15530. getDispatcher()->vkCmdSetDepthBias2EXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) );
  15531. }
  15532. //=== VK_EXT_acquire_drm_display ===
  15533. VULKAN_HPP_INLINE void PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display ) const
  15534. {
  15535. VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireDrmDisplayEXT && "Function <vkAcquireDrmDisplayEXT> requires <VK_EXT_acquire_drm_display>" );
  15536. VkResult result =
  15537. getDispatcher()->vkAcquireDrmDisplayEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), drmFd, static_cast<VkDisplayKHR>( display ) );
  15538. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
  15539. }
  15540. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId ) const
  15541. {
  15542. return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, drmFd, connectorId );
  15543. }
  15544. //=== VK_EXT_private_data ===
  15545. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot
  15546. Device::createPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo const & createInfo,
  15547. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  15548. {
  15549. return VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot( *this, createInfo, allocator );
  15550. }
  15551. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  15552. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const VULKAN_HPP_NOEXCEPT
  15553. {
  15554. VULKAN_HPP_ASSERT( getDispatcher()->vkDestroyPrivateDataSlotEXT &&
  15555. "Function <vkDestroyPrivateDataSlotEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
  15556. getDispatcher()->vkDestroyPrivateDataSlotEXT(
  15557. static_cast<VkDevice>( m_device ),
  15558. static_cast<VkPrivateDataSlot>( privateDataSlot ),
  15559. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  15560. }
  15561. VULKAN_HPP_INLINE void Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  15562. uint64_t objectHandle,
  15563. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  15564. uint64_t data ) const
  15565. {
  15566. VULKAN_HPP_ASSERT( getDispatcher()->vkSetPrivateDataEXT && "Function <vkSetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
  15567. VkResult result = getDispatcher()->vkSetPrivateDataEXT(
  15568. static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
  15569. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
  15570. }
  15571. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  15572. uint64_t objectHandle,
  15573. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot ) const VULKAN_HPP_NOEXCEPT
  15574. {
  15575. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPrivateDataEXT && "Function <vkGetPrivateDataEXT> requires <VK_EXT_private_data> or <VK_VERSION_1_3>" );
  15576. uint64_t data;
  15577. getDispatcher()->vkGetPrivateDataEXT(
  15578. static_cast<VkDevice>( m_device ), static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
  15579. return data;
  15580. }
  15581. # if defined( VK_ENABLE_BETA_EXTENSIONS )
  15582. //=== VK_KHR_video_encode_queue ===
  15583. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR(
  15584. const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const
  15585. {
  15586. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
  15587. "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
  15588. VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
  15589. VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
  15590. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  15591. reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
  15592. reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) );
  15593. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15594. VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
  15595. return qualityLevelProperties;
  15596. }
  15597. template <typename X, typename Y, typename... Z>
  15598. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR(
  15599. const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo ) const
  15600. {
  15601. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR &&
  15602. "Function <vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR> requires <VK_KHR_video_encode_queue>" );
  15603. StructureChain<X, Y, Z...> structureChain;
  15604. VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties =
  15605. structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>();
  15606. VkResult result = getDispatcher()->vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR(
  15607. static_cast<VkPhysicalDevice>( m_physicalDevice ),
  15608. reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
  15609. reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) );
  15610. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15611. VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
  15612. return structureChain;
  15613. }
  15614. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>>
  15615. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const
  15616. {
  15617. VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR &&
  15618. "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
  15619. std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t>> data_;
  15620. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
  15621. std::vector<uint8_t> & data = data_.second;
  15622. size_t dataSize;
  15623. VkResult result;
  15624. do
  15625. {
  15626. result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
  15627. static_cast<VkDevice>( m_device ),
  15628. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  15629. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  15630. &dataSize,
  15631. nullptr );
  15632. if ( ( result == VK_SUCCESS ) && dataSize )
  15633. {
  15634. data.resize( dataSize );
  15635. result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
  15636. static_cast<VkDevice>( m_device ),
  15637. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  15638. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  15639. &dataSize,
  15640. reinterpret_cast<void *>( data.data() ) );
  15641. }
  15642. } while ( result == VK_INCOMPLETE );
  15643. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  15644. return data_;
  15645. }
  15646. template <typename X, typename Y, typename... Z>
  15647. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>>
  15648. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo ) const
  15649. {
  15650. VULKAN_HPP_ASSERT( getDispatcher()->vkGetEncodedVideoSessionParametersKHR &&
  15651. "Function <vkGetEncodedVideoSessionParametersKHR> requires <VK_KHR_video_encode_queue>" );
  15652. std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t>> data_;
  15653. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
  15654. data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
  15655. std::vector<uint8_t> & data = data_.second;
  15656. size_t dataSize;
  15657. VkResult result;
  15658. do
  15659. {
  15660. result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
  15661. static_cast<VkDevice>( m_device ),
  15662. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  15663. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  15664. &dataSize,
  15665. nullptr );
  15666. if ( ( result == VK_SUCCESS ) && dataSize )
  15667. {
  15668. data.resize( dataSize );
  15669. result = getDispatcher()->vkGetEncodedVideoSessionParametersKHR(
  15670. static_cast<VkDevice>( m_device ),
  15671. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  15672. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  15673. &dataSize,
  15674. reinterpret_cast<void *>( data.data() ) );
  15675. }
  15676. } while ( result == VK_INCOMPLETE );
  15677. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  15678. return data_;
  15679. }
  15680. VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo ) const VULKAN_HPP_NOEXCEPT
  15681. {
  15682. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdEncodeVideoKHR && "Function <vkCmdEncodeVideoKHR> requires <VK_KHR_video_encode_queue>" );
  15683. getDispatcher()->vkCmdEncodeVideoKHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  15684. }
  15685. # endif /*VK_ENABLE_BETA_EXTENSIONS*/
  15686. # if defined( VK_USE_PLATFORM_METAL_EXT )
  15687. //=== VK_EXT_metal_objects ===
  15688. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
  15689. {
  15690. VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
  15691. VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
  15692. getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
  15693. return metalObjectsInfo;
  15694. }
  15695. template <typename X, typename Y, typename... Z>
  15696. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT() const VULKAN_HPP_NOEXCEPT
  15697. {
  15698. VULKAN_HPP_ASSERT( getDispatcher()->vkExportMetalObjectsEXT && "Function <vkExportMetalObjectsEXT> requires <VK_EXT_metal_objects>" );
  15699. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  15700. VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
  15701. getDispatcher()->vkExportMetalObjectsEXT( static_cast<VkDevice>( m_device ), reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
  15702. return structureChain;
  15703. }
  15704. # endif /*VK_USE_PLATFORM_METAL_EXT*/
  15705. //=== VK_KHR_synchronization2 ===
  15706. VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
  15707. const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
  15708. {
  15709. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetEvent2KHR && "Function <vkCmdSetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15710. getDispatcher()->vkCmdSetEvent2KHR(
  15711. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  15712. }
  15713. VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
  15714. VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
  15715. {
  15716. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResetEvent2KHR && "Function <vkCmdResetEvent2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15717. getDispatcher()->vkCmdResetEvent2KHR(
  15718. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  15719. }
  15720. VULKAN_HPP_INLINE void
  15721. CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
  15722. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos ) const
  15723. {
  15724. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWaitEvents2KHR && "Function <vkCmdWaitEvents2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15725. if ( events.size() != dependencyInfos.size() )
  15726. {
  15727. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
  15728. }
  15729. getDispatcher()->vkCmdWaitEvents2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15730. events.size(),
  15731. reinterpret_cast<const VkEvent *>( events.data() ),
  15732. reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  15733. }
  15734. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo ) const VULKAN_HPP_NOEXCEPT
  15735. {
  15736. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdPipelineBarrier2KHR &&
  15737. "Function <vkCmdPipelineBarrier2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15738. getDispatcher()->vkCmdPipelineBarrier2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15739. reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  15740. }
  15741. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  15742. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  15743. uint32_t query ) const VULKAN_HPP_NOEXCEPT
  15744. {
  15745. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteTimestamp2KHR &&
  15746. "Function <vkCmdWriteTimestamp2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15747. getDispatcher()->vkCmdWriteTimestamp2KHR(
  15748. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  15749. }
  15750. VULKAN_HPP_INLINE void Queue::submit2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits,
  15751. VULKAN_HPP_NAMESPACE::Fence fence ) const
  15752. {
  15753. VULKAN_HPP_ASSERT( getDispatcher()->vkQueueSubmit2KHR && "Function <vkQueueSubmit2KHR> requires <VK_KHR_synchronization2> or <VK_VERSION_1_3>" );
  15754. VkResult result = getDispatcher()->vkQueueSubmit2KHR(
  15755. static_cast<VkQueue>( m_queue ), submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
  15756. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
  15757. }
  15758. VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  15759. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  15760. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  15761. uint32_t marker ) const VULKAN_HPP_NOEXCEPT
  15762. {
  15763. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteBufferMarker2AMD && "Function <vkCmdWriteBufferMarker2AMD> requires <VK_KHR_synchronization2>" );
  15764. getDispatcher()->vkCmdWriteBufferMarker2AMD( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15765. static_cast<VkPipelineStageFlags2>( stage ),
  15766. static_cast<VkBuffer>( dstBuffer ),
  15767. static_cast<VkDeviceSize>( dstOffset ),
  15768. marker );
  15769. }
  15770. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> Queue::getCheckpointData2NV() const
  15771. {
  15772. VULKAN_HPP_ASSERT( getDispatcher()->vkGetQueueCheckpointData2NV && "Function <vkGetQueueCheckpointData2NV> requires <VK_KHR_synchronization2>" );
  15773. std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV> checkpointData;
  15774. uint32_t checkpointDataCount;
  15775. getDispatcher()->vkGetQueueCheckpointData2NV( static_cast<VkQueue>( m_queue ), &checkpointDataCount, nullptr );
  15776. checkpointData.resize( checkpointDataCount );
  15777. getDispatcher()->vkGetQueueCheckpointData2NV(
  15778. static_cast<VkQueue>( m_queue ), &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
  15779. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  15780. if ( checkpointDataCount < checkpointData.size() )
  15781. {
  15782. checkpointData.resize( checkpointDataCount );
  15783. }
  15784. return checkpointData;
  15785. }
  15786. //=== VK_EXT_descriptor_buffer ===
  15787. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize DescriptorSetLayout::getSizeEXT() const VULKAN_HPP_NOEXCEPT
  15788. {
  15789. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutSizeEXT && "Function <vkGetDescriptorSetLayoutSizeEXT> requires <VK_EXT_descriptor_buffer>" );
  15790. VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
  15791. getDispatcher()->vkGetDescriptorSetLayoutSizeEXT( static_cast<VkDevice>( m_device ),
  15792. static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ),
  15793. reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
  15794. return layoutSizeInBytes;
  15795. }
  15796. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
  15797. DescriptorSetLayout::getBindingOffsetEXT( uint32_t binding ) const VULKAN_HPP_NOEXCEPT
  15798. {
  15799. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT &&
  15800. "Function <vkGetDescriptorSetLayoutBindingOffsetEXT> requires <VK_EXT_descriptor_buffer>" );
  15801. VULKAN_HPP_NAMESPACE::DeviceSize offset;
  15802. getDispatcher()->vkGetDescriptorSetLayoutBindingOffsetEXT(
  15803. static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSetLayout>( m_descriptorSetLayout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
  15804. return offset;
  15805. }
  15806. VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
  15807. size_t dataSize,
  15808. void * pDescriptor ) const VULKAN_HPP_NOEXCEPT
  15809. {
  15810. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
  15811. getDispatcher()->vkGetDescriptorEXT(
  15812. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor );
  15813. }
  15814. template <typename DescriptorType>
  15815. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType
  15816. Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo ) const VULKAN_HPP_NOEXCEPT
  15817. {
  15818. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorEXT && "Function <vkGetDescriptorEXT> requires <VK_EXT_descriptor_buffer>" );
  15819. DescriptorType descriptor;
  15820. getDispatcher()->vkGetDescriptorEXT( static_cast<VkDevice>( m_device ),
  15821. reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ),
  15822. sizeof( DescriptorType ),
  15823. reinterpret_cast<void *>( &descriptor ) );
  15824. return descriptor;
  15825. }
  15826. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT(
  15827. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos ) const VULKAN_HPP_NOEXCEPT
  15828. {
  15829. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBuffersEXT && "Function <vkCmdBindDescriptorBuffersEXT> requires <VK_EXT_descriptor_buffer>" );
  15830. getDispatcher()->vkCmdBindDescriptorBuffersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15831. bindingInfos.size(),
  15832. reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
  15833. }
  15834. VULKAN_HPP_INLINE void
  15835. CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  15836. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  15837. uint32_t firstSet,
  15838. VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
  15839. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets ) const
  15840. {
  15841. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT &&
  15842. "Function <vkCmdSetDescriptorBufferOffsetsEXT> requires <VK_EXT_descriptor_buffer>" );
  15843. if ( bufferIndices.size() != offsets.size() )
  15844. {
  15845. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
  15846. }
  15847. getDispatcher()->vkCmdSetDescriptorBufferOffsetsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15848. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  15849. static_cast<VkPipelineLayout>( layout ),
  15850. firstSet,
  15851. bufferIndices.size(),
  15852. bufferIndices.data(),
  15853. reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  15854. }
  15855. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  15856. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  15857. uint32_t set ) const VULKAN_HPP_NOEXCEPT
  15858. {
  15859. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT &&
  15860. "Function <vkCmdBindDescriptorBufferEmbeddedSamplersEXT> requires <VK_EXT_descriptor_buffer>" );
  15861. getDispatcher()->vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
  15862. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
  15863. }
  15864. template <typename DataType>
  15865. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
  15866. Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info ) const
  15867. {
  15868. VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT &&
  15869. "Function <vkGetBufferOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
  15870. DataType data;
  15871. VkResult result = getDispatcher()->vkGetBufferOpaqueCaptureDescriptorDataEXT(
  15872. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
  15873. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
  15874. return data;
  15875. }
  15876. template <typename DataType>
  15877. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
  15878. Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info ) const
  15879. {
  15880. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT &&
  15881. "Function <vkGetImageOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
  15882. DataType data;
  15883. VkResult result = getDispatcher()->vkGetImageOpaqueCaptureDescriptorDataEXT(
  15884. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
  15885. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
  15886. return data;
  15887. }
  15888. template <typename DataType>
  15889. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
  15890. Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info ) const
  15891. {
  15892. VULKAN_HPP_ASSERT( getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT &&
  15893. "Function <vkGetImageViewOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
  15894. DataType data;
  15895. VkResult result = getDispatcher()->vkGetImageViewOpaqueCaptureDescriptorDataEXT(
  15896. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
  15897. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
  15898. return data;
  15899. }
  15900. template <typename DataType>
  15901. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
  15902. Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info ) const
  15903. {
  15904. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT &&
  15905. "Function <vkGetSamplerOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
  15906. DataType data;
  15907. VkResult result = getDispatcher()->vkGetSamplerOpaqueCaptureDescriptorDataEXT(
  15908. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
  15909. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
  15910. return data;
  15911. }
  15912. template <typename DataType>
  15913. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  15914. const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info ) const
  15915. {
  15916. VULKAN_HPP_ASSERT( getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT &&
  15917. "Function <vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT> requires <VK_EXT_descriptor_buffer>" );
  15918. DataType data;
  15919. VkResult result = getDispatcher()->vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  15920. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
  15921. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15922. VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
  15923. return data;
  15924. }
  15925. //=== VK_NV_fragment_shading_rate_enums ===
  15926. VULKAN_HPP_INLINE void
  15927. CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
  15928. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
  15929. {
  15930. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetFragmentShadingRateEnumNV &&
  15931. "Function <vkCmdSetFragmentShadingRateEnumNV> requires <VK_NV_fragment_shading_rate_enums>" );
  15932. getDispatcher()->vkCmdSetFragmentShadingRateEnumNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15933. static_cast<VkFragmentShadingRateNV>( shadingRate ),
  15934. reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  15935. }
  15936. //=== VK_EXT_mesh_shader ===
  15937. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
  15938. {
  15939. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksEXT && "Function <vkCmdDrawMeshTasksEXT> requires <VK_EXT_mesh_shader>" );
  15940. getDispatcher()->vkCmdDrawMeshTasksEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
  15941. }
  15942. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
  15943. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  15944. uint32_t drawCount,
  15945. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  15946. {
  15947. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectEXT && "Function <vkCmdDrawMeshTasksIndirectEXT> requires <VK_EXT_mesh_shader>" );
  15948. getDispatcher()->vkCmdDrawMeshTasksIndirectEXT(
  15949. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  15950. }
  15951. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
  15952. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  15953. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  15954. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  15955. uint32_t maxDrawCount,
  15956. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  15957. {
  15958. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT && "Function <vkCmdDrawMeshTasksIndirectCountEXT> requires <VK_EXT_mesh_shader>" );
  15959. getDispatcher()->vkCmdDrawMeshTasksIndirectCountEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15960. static_cast<VkBuffer>( buffer ),
  15961. static_cast<VkDeviceSize>( offset ),
  15962. static_cast<VkBuffer>( countBuffer ),
  15963. static_cast<VkDeviceSize>( countBufferOffset ),
  15964. maxDrawCount,
  15965. stride );
  15966. }
  15967. //=== VK_KHR_copy_commands2 ===
  15968. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo ) const VULKAN_HPP_NOEXCEPT
  15969. {
  15970. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBuffer2KHR && "Function <vkCmdCopyBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  15971. getDispatcher()->vkCmdCopyBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  15972. }
  15973. VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo ) const VULKAN_HPP_NOEXCEPT
  15974. {
  15975. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImage2KHR && "Function <vkCmdCopyImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  15976. getDispatcher()->vkCmdCopyImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  15977. }
  15978. VULKAN_HPP_INLINE void
  15979. CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
  15980. {
  15981. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyBufferToImage2KHR &&
  15982. "Function <vkCmdCopyBufferToImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  15983. getDispatcher()->vkCmdCopyBufferToImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15984. reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  15985. }
  15986. VULKAN_HPP_INLINE void
  15987. CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
  15988. {
  15989. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyImageToBuffer2KHR &&
  15990. "Function <vkCmdCopyImageToBuffer2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  15991. getDispatcher()->vkCmdCopyImageToBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  15992. reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  15993. }
  15994. VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo ) const VULKAN_HPP_NOEXCEPT
  15995. {
  15996. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBlitImage2KHR && "Function <vkCmdBlitImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  15997. getDispatcher()->vkCmdBlitImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  15998. }
  15999. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo ) const VULKAN_HPP_NOEXCEPT
  16000. {
  16001. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdResolveImage2KHR && "Function <vkCmdResolveImage2KHR> requires <VK_KHR_copy_commands2> or <VK_VERSION_1_3>" );
  16002. getDispatcher()->vkCmdResolveImage2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16003. reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  16004. }
  16005. //=== VK_EXT_device_fault ===
  16006. VULKAN_HPP_NODISCARD
  16007. VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
  16008. Device::getFaultInfoEXT() const
  16009. {
  16010. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceFaultInfoEXT && "Function <vkGetDeviceFaultInfoEXT> requires <VK_EXT_device_fault>" );
  16011. std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data_;
  16012. VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first;
  16013. VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second;
  16014. VkResult result = getDispatcher()->vkGetDeviceFaultInfoEXT(
  16015. static_cast<VkDevice>( m_device ), reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
  16016. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16017. VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
  16018. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
  16019. return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  16020. }
  16021. # if defined( VK_USE_PLATFORM_WIN32_KHR )
  16022. //=== VK_NV_acquire_winrt_display ===
  16023. VULKAN_HPP_INLINE void DisplayKHR::acquireWinrtNV() const
  16024. {
  16025. VULKAN_HPP_ASSERT( getDispatcher()->vkAcquireWinrtDisplayNV && "Function <vkAcquireWinrtDisplayNV> requires <VK_NV_acquire_winrt_display>" );
  16026. VkResult result = getDispatcher()->vkAcquireWinrtDisplayNV( static_cast<VkPhysicalDevice>( m_physicalDevice ), static_cast<VkDisplayKHR>( m_display ) );
  16027. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::DisplayKHR::acquireWinrtNV" );
  16028. }
  16029. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::DisplayKHR PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId ) const
  16030. {
  16031. return VULKAN_HPP_RAII_NAMESPACE::DisplayKHR( *this, deviceRelativeId );
  16032. }
  16033. # endif /*VK_USE_PLATFORM_WIN32_KHR*/
  16034. # if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  16035. //=== VK_EXT_directfb_surface ===
  16036. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  16037. Instance::createDirectFBSurfaceEXT( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const & createInfo,
  16038. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  16039. {
  16040. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  16041. }
  16042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  16043. PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb ) const VULKAN_HPP_NOEXCEPT
  16044. {
  16045. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT &&
  16046. "Function <vkGetPhysicalDeviceDirectFBPresentationSupportEXT> requires <VK_EXT_directfb_surface>" );
  16047. VkBool32 result =
  16048. getDispatcher()->vkGetPhysicalDeviceDirectFBPresentationSupportEXT( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &dfb );
  16049. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  16050. }
  16051. # endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  16052. //=== VK_EXT_vertex_input_dynamic_state ===
  16053. VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
  16054. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
  16055. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions ) const
  16056. VULKAN_HPP_NOEXCEPT
  16057. {
  16058. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetVertexInputEXT &&
  16059. "Function <vkCmdSetVertexInputEXT> requires <VK_EXT_shader_object> or <VK_EXT_vertex_input_dynamic_state>" );
  16060. getDispatcher()->vkCmdSetVertexInputEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16061. vertexBindingDescriptions.size(),
  16062. reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
  16063. vertexAttributeDescriptions.size(),
  16064. reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  16065. }
  16066. # if defined( VK_USE_PLATFORM_FUCHSIA )
  16067. //=== VK_FUCHSIA_external_memory ===
  16068. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
  16069. Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
  16070. {
  16071. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandleFUCHSIA && "Function <vkGetMemoryZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
  16072. zx_handle_t zirconHandle;
  16073. VkResult result = getDispatcher()->vkGetMemoryZirconHandleFUCHSIA(
  16074. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
  16075. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
  16076. return zirconHandle;
  16077. }
  16078. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA
  16079. Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle ) const
  16080. {
  16081. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA &&
  16082. "Function <vkGetMemoryZirconHandlePropertiesFUCHSIA> requires <VK_FUCHSIA_external_memory>" );
  16083. VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
  16084. VkResult result =
  16085. getDispatcher()->vkGetMemoryZirconHandlePropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
  16086. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  16087. zirconHandle,
  16088. reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
  16089. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
  16090. return memoryZirconHandleProperties;
  16091. }
  16092. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  16093. # if defined( VK_USE_PLATFORM_FUCHSIA )
  16094. //=== VK_FUCHSIA_external_semaphore ===
  16095. VULKAN_HPP_INLINE void
  16096. Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo ) const
  16097. {
  16098. VULKAN_HPP_ASSERT( getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA &&
  16099. "Function <vkImportSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
  16100. VkResult result = getDispatcher()->vkImportSemaphoreZirconHandleFUCHSIA(
  16101. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
  16102. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
  16103. }
  16104. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE zx_handle_t
  16105. Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo ) const
  16106. {
  16107. VULKAN_HPP_ASSERT( getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA &&
  16108. "Function <vkGetSemaphoreZirconHandleFUCHSIA> requires <VK_FUCHSIA_external_semaphore>" );
  16109. zx_handle_t zirconHandle;
  16110. VkResult result = getDispatcher()->vkGetSemaphoreZirconHandleFUCHSIA(
  16111. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
  16112. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
  16113. return zirconHandle;
  16114. }
  16115. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  16116. # if defined( VK_USE_PLATFORM_FUCHSIA )
  16117. //=== VK_FUCHSIA_buffer_collection ===
  16118. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA
  16119. Device::createBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA const & createInfo,
  16120. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  16121. {
  16122. return VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA( *this, createInfo, allocator );
  16123. }
  16124. VULKAN_HPP_INLINE void BufferCollectionFUCHSIA::setImageConstraints( const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo ) const
  16125. {
  16126. VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA &&
  16127. "Function <vkSetBufferCollectionImageConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
  16128. VkResult result =
  16129. getDispatcher()->vkSetBufferCollectionImageConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
  16130. static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
  16131. reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
  16132. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setImageConstraints" );
  16133. }
  16134. VULKAN_HPP_INLINE void
  16135. BufferCollectionFUCHSIA::setBufferConstraints( const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo ) const
  16136. {
  16137. VULKAN_HPP_ASSERT( getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA &&
  16138. "Function <vkSetBufferCollectionBufferConstraintsFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
  16139. VkResult result =
  16140. getDispatcher()->vkSetBufferCollectionBufferConstraintsFUCHSIA( static_cast<VkDevice>( m_device ),
  16141. static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
  16142. reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
  16143. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::setBufferConstraints" );
  16144. }
  16145. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA BufferCollectionFUCHSIA::getProperties() const
  16146. {
  16147. VULKAN_HPP_ASSERT( getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA &&
  16148. "Function <vkGetBufferCollectionPropertiesFUCHSIA> requires <VK_FUCHSIA_buffer_collection>" );
  16149. VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
  16150. VkResult result = getDispatcher()->vkGetBufferCollectionPropertiesFUCHSIA( static_cast<VkDevice>( m_device ),
  16151. static_cast<VkBufferCollectionFUCHSIA>( m_collection ),
  16152. reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
  16153. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::BufferCollectionFUCHSIA::getProperties" );
  16154. return properties;
  16155. }
  16156. # endif /*VK_USE_PLATFORM_FUCHSIA*/
  16157. //=== VK_HUAWEI_subpass_shading ===
  16158. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<VULKAN_HPP_NAMESPACE::Result, VULKAN_HPP_NAMESPACE::Extent2D>
  16159. RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI() const
  16160. {
  16161. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI &&
  16162. "Function <vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
  16163. VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
  16164. VkResult result = getDispatcher()->vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
  16165. static_cast<VkDevice>( m_device ), static_cast<VkRenderPass>( m_renderPass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
  16166. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16167. VULKAN_HPP_NAMESPACE_STRING "::RenderPass::getSubpassShadingMaxWorkgroupSizeHUAWEI",
  16168. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
  16169. return std::make_pair( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
  16170. }
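// Usage sketch (editorial example, not generator output): querying the maximum subpass shading
// workgroup size. Assumes a vk::raii::RenderPass `renderPass` that contains a subpass shading subpass.
//   auto [result, maxWorkgroupSize] = renderPass.getSubpassShadingMaxWorkgroupSizeHUAWEI();
//   // both eSuccess and eIncomplete pass the resultCheck above; maxWorkgroupSize is a vk::Extent2D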
  16171. VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI() const VULKAN_HPP_NOEXCEPT
  16172. {
  16173. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSubpassShadingHUAWEI && "Function <vkCmdSubpassShadingHUAWEI> requires <VK_HUAWEI_subpass_shading>" );
  16174. getDispatcher()->vkCmdSubpassShadingHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ) );
  16175. }
  16176. //=== VK_HUAWEI_invocation_mask ===
  16177. VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
  16178. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
  16179. {
  16180. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindInvocationMaskHUAWEI && "Function <vkCmdBindInvocationMaskHUAWEI> requires <VK_HUAWEI_invocation_mask>" );
  16181. getDispatcher()->vkCmdBindInvocationMaskHUAWEI(
  16182. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  16183. }
  16184. //=== VK_NV_external_memory_rdma ===
  16185. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::RemoteAddressNV
  16186. Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo ) const
  16187. {
  16188. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMemoryRemoteAddressNV && "Function <vkGetMemoryRemoteAddressNV> requires <VK_NV_external_memory_rdma>" );
  16189. VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
  16190. VkResult result = getDispatcher()->vkGetMemoryRemoteAddressNV( static_cast<VkDevice>( m_device ),
  16191. reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ),
  16192. reinterpret_cast<VkRemoteAddressNV *>( &address ) );
  16193. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
  16194. return address;
  16195. }
  16196. //=== VK_EXT_pipeline_properties ===
  16197. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::BaseOutStructure
  16198. Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo ) const
  16199. {
  16200. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelinePropertiesEXT && "Function <vkGetPipelinePropertiesEXT> requires <VK_EXT_pipeline_properties>" );
  16201. VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
  16202. VkResult result = getDispatcher()->vkGetPipelinePropertiesEXT( static_cast<VkDevice>( m_device ),
  16203. reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ),
  16204. reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
  16205. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
  16206. return pipelineProperties;
  16207. }
  16208. //=== VK_EXT_extended_dynamic_state2 ===
  16209. VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
  16210. {
  16211. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPatchControlPointsEXT &&
  16212. "Function <vkCmdSetPatchControlPointsEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
  16213. getDispatcher()->vkCmdSetPatchControlPointsEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), patchControlPoints );
  16214. }
  16215. VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
  16216. {
  16217. VULKAN_HPP_ASSERT(
  16218. getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT &&
  16219. "Function <vkCmdSetRasterizerDiscardEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  16220. getDispatcher()->vkCmdSetRasterizerDiscardEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( rasterizerDiscardEnable ) );
  16221. }
  16222. VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
  16223. {
  16224. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthBiasEnableEXT &&
  16225. "Function <vkCmdSetDepthBiasEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  16226. getDispatcher()->vkCmdSetDepthBiasEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthBiasEnable ) );
  16227. }
  16228. VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
  16229. {
  16230. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEXT &&
  16231. "Function <vkCmdSetLogicOpEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object>" );
  16232. getDispatcher()->vkCmdSetLogicOpEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkLogicOp>( logicOp ) );
  16233. }
  16234. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
  16235. {
  16236. VULKAN_HPP_ASSERT(
  16237. getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT &&
  16238. "Function <vkCmdSetPrimitiveRestartEnableEXT> requires <VK_EXT_extended_dynamic_state2> or <VK_EXT_shader_object> or <VK_VERSION_1_3>" );
  16239. getDispatcher()->vkCmdSetPrimitiveRestartEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( primitiveRestartEnable ) );
  16240. }
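// Usage sketch (editorial example, not generator output): VK_EXT_extended_dynamic_state2 on a
// recording vk::raii::CommandBuffer `commandBuffer`; assumes the matching dynamic states were
// enabled on the bound pipeline (or a shader object is bound).
//   commandBuffer.setRasterizerDiscardEnableEXT( VK_FALSE );
//   commandBuffer.setDepthBiasEnableEXT( VK_FALSE );
//   commandBuffer.setPrimitiveRestartEnableEXT( VK_FALSE );
//   commandBuffer.setPatchControlPointsEXT( 3 );        // only meaningful with patch primitives
//   commandBuffer.setLogicOpEXT( vk::LogicOp::eCopy );  // only when the logic op is dynamic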
  16241. # if defined( VK_USE_PLATFORM_SCREEN_QNX )
  16242. //=== VK_QNX_screen_surface ===
  16243. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR
  16244. Instance::createScreenSurfaceQNX( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX const & createInfo,
  16245. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  16246. {
  16247. return VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR( *this, createInfo, allocator );
  16248. }
  16249. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  16250. PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window ) const VULKAN_HPP_NOEXCEPT
  16251. {
  16252. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX &&
  16253. "Function <vkGetPhysicalDeviceScreenPresentationSupportQNX> requires <VK_QNX_screen_surface>" );
  16254. VkBool32 result =
  16255. getDispatcher()->vkGetPhysicalDeviceScreenPresentationSupportQNX( static_cast<VkPhysicalDevice>( m_physicalDevice ), queueFamilyIndex, &window );
  16256. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  16257. }
  16258. # endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  16259. //=== VK_EXT_color_write_enable ===
  16260. VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT(
  16261. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables ) const VULKAN_HPP_NOEXCEPT
  16262. {
  16263. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteEnableEXT && "Function <vkCmdSetColorWriteEnableEXT> requires <VK_EXT_color_write_enable>" );
  16264. getDispatcher()->vkCmdSetColorWriteEnableEXT(
  16265. static_cast<VkCommandBuffer>( m_commandBuffer ), colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  16266. }
  16267. //=== VK_KHR_ray_tracing_maintenance1 ===
  16268. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
  16269. {
  16270. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdTraceRaysIndirect2KHR && "Function <vkCmdTraceRaysIndirect2KHR> requires <VK_KHR_ray_tracing_maintenance1>" );
  16271. getDispatcher()->vkCmdTraceRaysIndirect2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  16272. }
  16273. //=== VK_EXT_multi_draw ===
  16274. VULKAN_HPP_INLINE void
  16275. CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
  16276. uint32_t instanceCount,
  16277. uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
  16278. {
  16279. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiEXT && "Function <vkCmdDrawMultiEXT> requires <VK_EXT_multi_draw>" );
  16280. getDispatcher()->vkCmdDrawMultiEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16281. vertexInfo.size(),
  16282. reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
  16283. instanceCount,
  16284. firstInstance,
  16285. vertexInfo.stride() );
  16286. }
  16287. VULKAN_HPP_INLINE void
  16288. CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
  16289. uint32_t instanceCount,
  16290. uint32_t firstInstance,
  16291. Optional<const int32_t> vertexOffset ) const VULKAN_HPP_NOEXCEPT
  16292. {
  16293. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawMultiIndexedEXT && "Function <vkCmdDrawMultiIndexedEXT> requires <VK_EXT_multi_draw>" );
  16294. getDispatcher()->vkCmdDrawMultiIndexedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16295. indexInfo.size(),
  16296. reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
  16297. instanceCount,
  16298. firstInstance,
  16299. indexInfo.stride(),
  16300. static_cast<const int32_t *>( vertexOffset ) );
  16301. }
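// Usage sketch (editorial example, not generator output): VK_EXT_multi_draw with the strided proxies
// used above. Assumes <array> is available and suitable vertex/index buffers are already bound; the
// draw ranges are placeholders. The proxy stride defaults to the sizeof of the element type.
//   std::array<vk::MultiDrawInfoEXT, 2> draws = { vk::MultiDrawInfoEXT( 0, 3 ), vk::MultiDrawInfoEXT( 3, 3 ) };
//   commandBuffer.drawMultiEXT( draws, 1 /*instanceCount*/, 0 /*firstInstance*/ );
//   std::array<vk::MultiDrawIndexedInfoEXT, 1> indexedDraws = { vk::MultiDrawIndexedInfoEXT( 0, 6, 0 ) };
//   commandBuffer.drawMultiIndexedEXT( indexedDraws, 1, 0, nullptr );  // nullptr: use each draw's own vertexOffset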
  16302. //=== VK_EXT_opacity_micromap ===
  16303. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::MicromapEXT
  16304. Device::createMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT const & createInfo,
  16305. VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
  16306. {
  16307. return VULKAN_HPP_RAII_NAMESPACE::MicromapEXT( *this, createInfo, allocator );
  16308. }
  16309. VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT(
  16310. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const VULKAN_HPP_NOEXCEPT
  16311. {
  16312. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBuildMicromapsEXT && "Function <vkCmdBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
  16313. getDispatcher()->vkCmdBuildMicromapsEXT(
  16314. static_cast<VkCommandBuffer>( m_commandBuffer ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
  16315. }
  16316. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  16317. Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  16318. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos ) const
  16319. {
  16320. VULKAN_HPP_ASSERT( getDispatcher()->vkBuildMicromapsEXT && "Function <vkBuildMicromapsEXT> requires <VK_EXT_opacity_micromap>" );
  16321. VkResult result = getDispatcher()->vkBuildMicromapsEXT( static_cast<VkDevice>( m_device ),
  16322. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  16323. infos.size(),
  16324. reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
  16325. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16326. VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
  16327. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  16328. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  16329. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  16330. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  16331. }
  16332. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  16333. const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const
  16334. {
  16335. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapEXT && "Function <vkCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
  16336. VkResult result = getDispatcher()->vkCopyMicromapEXT(
  16337. static_cast<VkDevice>( m_device ), static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  16338. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16339. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
  16340. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  16341. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  16342. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  16343. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  16344. }
  16345. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  16346. Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  16347. const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const
  16348. {
  16349. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMicromapToMemoryEXT && "Function <vkCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
  16350. VkResult result = getDispatcher()->vkCopyMicromapToMemoryEXT( static_cast<VkDevice>( m_device ),
  16351. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  16352. reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  16353. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16354. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
  16355. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  16356. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  16357. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  16358. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  16359. }
  16360. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  16361. Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  16362. const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const
  16363. {
  16364. VULKAN_HPP_ASSERT( getDispatcher()->vkCopyMemoryToMicromapEXT && "Function <vkCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
  16365. VkResult result = getDispatcher()->vkCopyMemoryToMicromapEXT( static_cast<VkDevice>( m_device ),
  16366. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  16367. reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  16368. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  16369. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
  16370. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  16371. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  16372. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  16373. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  16374. }
  16375. template <typename DataType>
  16376. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<DataType>
  16377. Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  16378. VULKAN_HPP_NAMESPACE::QueryType queryType,
  16379. size_t dataSize,
  16380. size_t stride ) const
  16381. {
  16382. VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
  16383. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  16384. std::vector<DataType> data( dataSize / sizeof( DataType ) );
  16385. VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
  16386. micromaps.size(),
  16387. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  16388. static_cast<VkQueryType>( queryType ),
  16389. data.size() * sizeof( DataType ),
  16390. reinterpret_cast<void *>( data.data() ),
  16391. stride );
  16392. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
  16393. return data;
  16394. }
  16395. template <typename DataType>
  16396. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DataType
  16397. Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  16398. VULKAN_HPP_NAMESPACE::QueryType queryType,
  16399. size_t stride ) const
  16400. {
  16401. VULKAN_HPP_ASSERT( getDispatcher()->vkWriteMicromapsPropertiesEXT && "Function <vkWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
  16402. DataType data;
  16403. VkResult result = getDispatcher()->vkWriteMicromapsPropertiesEXT( static_cast<VkDevice>( m_device ),
  16404. micromaps.size(),
  16405. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  16406. static_cast<VkQueryType>( queryType ),
  16407. sizeof( DataType ),
  16408. reinterpret_cast<void *>( &data ),
  16409. stride );
  16410. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
  16411. return data;
  16412. }
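// Usage sketch (editorial example, not generator output): reading a compacted-size query for a single
// micromap on the host. Assumes `micromap` is a built vk::raii::MicromapEXT and that host micromap
// commands are supported (micromapHostCommands feature); the query writes one vk::DeviceSize.
//   vk::DeviceSize compactedSize = device.writeMicromapsPropertyEXT<vk::DeviceSize>(
//     *micromap, vk::QueryType::eMicromapCompactedSizeEXT, sizeof( vk::DeviceSize ) );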
  16413. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
  16414. {
  16415. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapEXT && "Function <vkCmdCopyMicromapEXT> requires <VK_EXT_opacity_micromap>" );
  16416. getDispatcher()->vkCmdCopyMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  16417. }
  16418. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
  16419. {
  16420. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMicromapToMemoryEXT && "Function <vkCmdCopyMicromapToMemoryEXT> requires <VK_EXT_opacity_micromap>" );
  16421. getDispatcher()->vkCmdCopyMicromapToMemoryEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16422. reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  16423. }
  16424. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info ) const VULKAN_HPP_NOEXCEPT
  16425. {
  16426. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToMicromapEXT && "Function <vkCmdCopyMemoryToMicromapEXT> requires <VK_EXT_opacity_micromap>" );
  16427. getDispatcher()->vkCmdCopyMemoryToMicromapEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16428. reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  16429. }
  16430. VULKAN_HPP_INLINE void
  16431. CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  16432. VULKAN_HPP_NAMESPACE::QueryType queryType,
  16433. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  16434. uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
  16435. {
  16436. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdWriteMicromapsPropertiesEXT &&
  16437. "Function <vkCmdWriteMicromapsPropertiesEXT> requires <VK_EXT_opacity_micromap>" );
  16438. getDispatcher()->vkCmdWriteMicromapsPropertiesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16439. micromaps.size(),
  16440. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  16441. static_cast<VkQueryType>( queryType ),
  16442. static_cast<VkQueryPool>( queryPool ),
  16443. firstQuery );
  16444. }
  16445. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
  16446. Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo ) const VULKAN_HPP_NOEXCEPT
  16447. {
  16448. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceMicromapCompatibilityEXT &&
  16449. "Function <vkGetDeviceMicromapCompatibilityEXT> requires <VK_EXT_opacity_micromap>" );
  16450. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
  16451. getDispatcher()->vkGetDeviceMicromapCompatibilityEXT( static_cast<VkDevice>( m_device ),
  16452. reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
  16453. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
  16454. return compatibility;
  16455. }
  16456. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
  16457. Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  16458. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo ) const VULKAN_HPP_NOEXCEPT
  16459. {
  16460. VULKAN_HPP_ASSERT( getDispatcher()->vkGetMicromapBuildSizesEXT && "Function <vkGetMicromapBuildSizesEXT> requires <VK_EXT_opacity_micromap>" );
  16461. VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
  16462. getDispatcher()->vkGetMicromapBuildSizesEXT( static_cast<VkDevice>( m_device ),
  16463. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  16464. reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
  16465. reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
  16466. return sizeInfo;
  16467. }
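// Usage sketch (editorial example, not generator output): sizing a micromap before creating and
// building it. Assumes `buildInfo` is a filled vk::MicromapBuildInfoEXT (type, flags, usage counts).
//   vk::MicromapBuildSizesInfoEXT sizes =
//     device.getMicromapBuildSizesEXT( vk::AccelerationStructureBuildTypeKHR::eDevice, buildInfo );
//   // sizes.micromapSize and sizes.buildScratchSize drive the buffer allocations for createMicromapEXT
//   // and for the subsequent buildMicromapsEXT call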
  16468. //=== VK_HUAWEI_cluster_culling_shader ===
  16469. VULKAN_HPP_INLINE void CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
  16470. {
  16471. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterHUAWEI && "Function <vkCmdDrawClusterHUAWEI> requires <VK_HUAWEI_cluster_culling_shader>" );
  16472. getDispatcher()->vkCmdDrawClusterHUAWEI( static_cast<VkCommandBuffer>( m_commandBuffer ), groupCountX, groupCountY, groupCountZ );
  16473. }
  16474. VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer,
  16475. VULKAN_HPP_NAMESPACE::DeviceSize offset ) const VULKAN_HPP_NOEXCEPT
  16476. {
  16477. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDrawClusterIndirectHUAWEI &&
  16478. "Function <vkCmdDrawClusterIndirectHUAWEI> requires <VK_HUAWEI_cluster_culling_shader>" );
  16479. getDispatcher()->vkCmdDrawClusterIndirectHUAWEI(
  16480. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  16481. }
  16482. //=== VK_EXT_pageable_device_local_memory ===
  16483. VULKAN_HPP_INLINE void DeviceMemory::setPriorityEXT( float priority ) const VULKAN_HPP_NOEXCEPT
  16484. {
  16485. VULKAN_HPP_ASSERT( getDispatcher()->vkSetDeviceMemoryPriorityEXT &&
  16486. "Function <vkSetDeviceMemoryPriorityEXT> requires <VK_EXT_pageable_device_local_memory>" );
  16487. getDispatcher()->vkSetDeviceMemoryPriorityEXT( static_cast<VkDevice>( m_device ), static_cast<VkDeviceMemory>( m_memory ), priority );
  16488. }
  16489. //=== VK_KHR_maintenance4 ===
  16490. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  16491. Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  16492. {
  16493. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
  16494. "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  16495. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  16496. getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  16497. reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
  16498. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16499. return memoryRequirements;
  16500. }
  16501. template <typename X, typename Y, typename... Z>
  16502. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  16503. Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  16504. {
  16505. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR &&
  16506. "Function <vkGetDeviceBufferMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  16507. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  16508. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  16509. getDispatcher()->vkGetDeviceBufferMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  16510. reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ),
  16511. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16512. return structureChain;
  16513. }
  16514. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  16515. Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  16516. {
  16517. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
  16518. "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  16519. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  16520. getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  16521. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  16522. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16523. return memoryRequirements;
  16524. }
  16525. template <typename X, typename Y, typename... Z>
  16526. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  16527. Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const VULKAN_HPP_NOEXCEPT
  16528. {
  16529. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR &&
  16530. "Function <vkGetDeviceImageMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  16531. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  16532. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  16533. getDispatcher()->vkGetDeviceImageMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  16534. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  16535. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16536. return structureChain;
  16537. }
  16538. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>
  16539. Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info ) const
  16540. {
  16541. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR &&
  16542. "Function <vkGetDeviceImageSparseMemoryRequirementsKHR> requires <VK_KHR_maintenance4> or <VK_VERSION_1_3>" );
  16543. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2> sparseMemoryRequirements;
  16544. uint32_t sparseMemoryRequirementCount;
  16545. getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR(
  16546. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  16547. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  16548. getDispatcher()->vkGetDeviceImageSparseMemoryRequirementsKHR( static_cast<VkDevice>( m_device ),
  16549. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  16550. &sparseMemoryRequirementCount,
  16551. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  16552. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  16553. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  16554. {
  16555. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  16556. }
  16557. return sparseMemoryRequirements;
  16558. }
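// Usage sketch (editorial example, not generator output): VK_KHR_maintenance4 memory requirements
// without creating the buffer, optionally through a structure chain. Assumes `bufferCreateInfo` is a
// filled vk::BufferCreateInfo.
//   vk::DeviceBufferMemoryRequirements info{};
//   info.pCreateInfo = &bufferCreateInfo;
//   auto chain =
//     device.getBufferMemoryRequirementsKHR<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( info );
//   vk::MemoryRequirements const & reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;
//   vk::Bool32 prefersDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;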
  16559. //=== VK_VALVE_descriptor_set_host_mapping ===
  16560. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE Device::getDescriptorSetLayoutHostMappingInfoVALVE(
  16561. const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference ) const VULKAN_HPP_NOEXCEPT
  16562. {
  16563. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE &&
  16564. "Function <vkGetDescriptorSetLayoutHostMappingInfoVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
  16565. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
  16566. getDispatcher()->vkGetDescriptorSetLayoutHostMappingInfoVALVE( static_cast<VkDevice>( m_device ),
  16567. reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
  16568. reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
  16569. return hostMapping;
  16570. }
  16571. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * DescriptorSet::getHostMappingVALVE() const VULKAN_HPP_NOEXCEPT
  16572. {
  16573. VULKAN_HPP_ASSERT( getDispatcher()->vkGetDescriptorSetHostMappingVALVE &&
  16574. "Function <vkGetDescriptorSetHostMappingVALVE> requires <VK_VALVE_descriptor_set_host_mapping>" );
  16575. void * pData;
  16576. getDispatcher()->vkGetDescriptorSetHostMappingVALVE( static_cast<VkDevice>( m_device ), static_cast<VkDescriptorSet>( m_descriptorSet ), &pData );
  16577. return pData;
  16578. }
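// Usage sketch (editorial example, not generator output): VK_VALVE_descriptor_set_host_mapping.
// Assumes `descriptorSet` is a vk::raii::DescriptorSet and `setLayout` is the (non-RAII)
// vk::DescriptorSetLayout handle it was allocated from.
//   vk::DescriptorSetBindingReferenceVALVE bindingReference( setLayout, 0 /*binding*/ );
//   vk::DescriptorSetLayoutHostMappingInfoVALVE mappingInfo =
//     device.getDescriptorSetLayoutHostMappingInfoVALVE( bindingReference );
//   void * descriptorData = descriptorSet.getHostMappingVALVE();
//   // mappingInfo.descriptorOffset and mappingInfo.descriptorSize locate the binding inside descriptorData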
  16579. //=== VK_NV_copy_memory_indirect ===
  16580. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
  16581. uint32_t copyCount,
  16582. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  16583. {
  16584. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryIndirectNV && "Function <vkCmdCopyMemoryIndirectNV> requires <VK_NV_copy_memory_indirect>" );
  16585. getDispatcher()->vkCmdCopyMemoryIndirectNV(
  16586. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
  16587. }
  16588. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV(
  16589. VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
  16590. uint32_t stride,
  16591. VULKAN_HPP_NAMESPACE::Image dstImage,
  16592. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  16593. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources ) const VULKAN_HPP_NOEXCEPT
  16594. {
  16595. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdCopyMemoryToImageIndirectNV &&
  16596. "Function <vkCmdCopyMemoryToImageIndirectNV> requires <VK_NV_copy_memory_indirect>" );
  16597. getDispatcher()->vkCmdCopyMemoryToImageIndirectNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16598. static_cast<VkDeviceAddress>( copyBufferAddress ),
  16599. imageSubresources.size(),
  16600. stride,
  16601. static_cast<VkImage>( dstImage ),
  16602. static_cast<VkImageLayout>( dstImageLayout ),
  16603. reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
  16604. }
  16605. //=== VK_NV_memory_decompression ===
  16606. VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV(
  16607. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT
  16608. {
  16609. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryNV && "Function <vkCmdDecompressMemoryNV> requires <VK_NV_memory_decompression>" );
  16610. getDispatcher()->vkCmdDecompressMemoryNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16611. decompressMemoryRegions.size(),
  16612. reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
  16613. }
  16614. VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
  16615. VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
  16616. uint32_t stride ) const VULKAN_HPP_NOEXCEPT
  16617. {
  16618. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdDecompressMemoryIndirectCountNV &&
  16619. "Function <vkCmdDecompressMemoryIndirectCountNV> requires <VK_NV_memory_decompression>" );
  16620. getDispatcher()->vkCmdDecompressMemoryIndirectCountNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16621. static_cast<VkDeviceAddress>( indirectCommandsAddress ),
  16622. static_cast<VkDeviceAddress>( indirectCommandsCountAddress ),
  16623. stride );
  16624. }
  16625. //=== VK_NV_device_generated_commands_compute ===
  16626. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  16627. Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
  16628. {
  16629. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
  16630. "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
  16631. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  16632. getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  16633. reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
  16634. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16635. return memoryRequirements;
  16636. }
  16637. template <typename X, typename Y, typename... Z>
  16638. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  16639. Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
  16640. {
  16641. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV &&
  16642. "Function <vkGetPipelineIndirectMemoryRequirementsNV> requires <VK_NV_device_generated_commands_compute>" );
  16643. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  16644. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  16645. getDispatcher()->vkGetPipelineIndirectMemoryRequirementsNV( static_cast<VkDevice>( m_device ),
  16646. reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
  16647. reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  16648. return structureChain;
  16649. }
  16650. VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  16651. VULKAN_HPP_NAMESPACE::Pipeline pipeline ) const VULKAN_HPP_NOEXCEPT
  16652. {
  16653. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV &&
  16654. "Function <vkCmdUpdatePipelineIndirectBufferNV> requires <VK_NV_device_generated_commands_compute>" );
  16655. getDispatcher()->vkCmdUpdatePipelineIndirectBufferNV(
  16656. static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  16657. }
  16658. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
  16659. Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info ) const VULKAN_HPP_NOEXCEPT
  16660. {
  16661. VULKAN_HPP_ASSERT( getDispatcher()->vkGetPipelineIndirectDeviceAddressNV &&
  16662. "Function <vkGetPipelineIndirectDeviceAddressNV> requires <VK_NV_device_generated_commands_compute>" );
  16663. VkDeviceAddress result = getDispatcher()->vkGetPipelineIndirectDeviceAddressNV(
  16664. static_cast<VkDevice>( m_device ), reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
  16665. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  16666. }
  16667. //=== VK_EXT_extended_dynamic_state3 ===
  16668. VULKAN_HPP_INLINE void
  16669. CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
  16670. {
  16671. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetTessellationDomainOriginEXT &&
  16672. "Function <vkCmdSetTessellationDomainOriginEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16673. getDispatcher()->vkCmdSetTessellationDomainOriginEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16674. static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
  16675. }
  16676. VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
  16677. {
  16678. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClampEnableEXT &&
  16679. "Function <vkCmdSetDepthClampEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16680. getDispatcher()->vkCmdSetDepthClampEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClampEnable ) );
  16681. }
  16682. VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
  16683. {
  16684. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetPolygonModeEXT &&
  16685. "Function <vkCmdSetPolygonModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16686. getDispatcher()->vkCmdSetPolygonModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkPolygonMode>( polygonMode ) );
  16687. }
  16688. VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
  16689. {
  16690. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationSamplesEXT &&
  16691. "Function <vkCmdSetRasterizationSamplesEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16692. getDispatcher()->vkCmdSetRasterizationSamplesEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16693. static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
  16694. }
  16695. VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
  16696. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask ) const
  16697. {
  16698. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleMaskEXT &&
  16699. "Function <vkCmdSetSampleMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16700. if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
  16701. {
  16702. throw LogicError( VULKAN_HPP_NAMESPACE_STRING
  16703. "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
  16704. }
  16705. getDispatcher()->vkCmdSetSampleMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16706. static_cast<VkSampleCountFlagBits>( samples ),
  16707. reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
  16708. }
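// Usage sketch (editorial example, not generator output): the sampleMask proxy must hold exactly
// ( samples + 31 ) / 32 words, matching the LogicError check above; for 4-sample rasterization a
// single 32-bit word is enough.
//   vk::SampleMask mask = 0xFFFFFFFF;
//   commandBuffer.setSampleMaskEXT( vk::SampleCountFlagBits::e4, mask );  // one-element ArrayProxy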
  16709. VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
  16710. {
  16711. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT &&
  16712. "Function <vkCmdSetAlphaToCoverageEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16713. getDispatcher()->vkCmdSetAlphaToCoverageEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToCoverageEnable ) );
  16714. }
  16715. VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
  16716. {
  16717. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAlphaToOneEnableEXT &&
  16718. "Function <vkCmdSetAlphaToOneEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16719. getDispatcher()->vkCmdSetAlphaToOneEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( alphaToOneEnable ) );
  16720. }
  16721. VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
  16722. {
  16723. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLogicOpEnableEXT &&
  16724. "Function <vkCmdSetLogicOpEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16725. getDispatcher()->vkCmdSetLogicOpEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( logicOpEnable ) );
  16726. }
  16727. VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT(
  16728. uint32_t firstAttachment, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables ) const VULKAN_HPP_NOEXCEPT
  16729. {
  16730. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEnableEXT &&
  16731. "Function <vkCmdSetColorBlendEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16732. getDispatcher()->vkCmdSetColorBlendEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16733. firstAttachment,
  16734. colorBlendEnables.size(),
  16735. reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
  16736. }
  16737. VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT(
  16738. uint32_t firstAttachment,
  16739. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations ) const VULKAN_HPP_NOEXCEPT
  16740. {
  16741. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendEquationEXT &&
  16742. "Function <vkCmdSetColorBlendEquationEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16743. getDispatcher()->vkCmdSetColorBlendEquationEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16744. firstAttachment,
  16745. colorBlendEquations.size(),
  16746. reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
  16747. }
  16748. VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT(
  16749. uint32_t firstAttachment,
  16750. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks ) const VULKAN_HPP_NOEXCEPT
  16751. {
  16752. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorWriteMaskEXT &&
  16753. "Function <vkCmdSetColorWriteMaskEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16754. getDispatcher()->vkCmdSetColorWriteMaskEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16755. firstAttachment,
  16756. colorWriteMasks.size(),
  16757. reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
  16758. }
  16759. VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
  16760. {
  16761. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRasterizationStreamEXT &&
  16762. "Function <vkCmdSetRasterizationStreamEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16763. getDispatcher()->vkCmdSetRasterizationStreamEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), rasterizationStream );
  16764. }
  16765. VULKAN_HPP_INLINE void CommandBuffer::setConservativeRasterizationModeEXT(
  16766. VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
  16767. {
  16768. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetConservativeRasterizationModeEXT &&
  16769. "Function <vkCmdSetConservativeRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16770. getDispatcher()->vkCmdSetConservativeRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
  16771. static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
  16772. }
  16773. VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
  16774. {
  16775. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT &&
  16776. "Function <vkCmdSetExtraPrimitiveOverestimationSizeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16777. getDispatcher()->vkCmdSetExtraPrimitiveOverestimationSizeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), extraPrimitiveOverestimationSize );
  16778. }
  16779. VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
  16780. {
  16781. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipEnableEXT &&
  16782. "Function <vkCmdSetDepthClipEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
  16783. getDispatcher()->vkCmdSetDepthClipEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( depthClipEnable ) );
  16784. }
  16785. VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
  16786. {
  16787. VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetSampleLocationsEnableEXT &&
  16788. "Function <vkCmdSetSampleLocationsEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );
      getDispatcher()->vkCmdSetSampleLocationsEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( sampleLocationsEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT(
      uint32_t firstAttachment,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetColorBlendAdvancedEXT &&
                         "Function <vkCmdSetColorBlendAdvancedEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetColorBlendAdvancedEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                      firstAttachment,
                                                      colorBlendAdvanced.size(),
                                                      reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetProvokingVertexModeEXT &&
                         "Function <vkCmdSetProvokingVertexModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetProvokingVertexModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                       static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineRasterizationModeEXT &&
                         "Function <vkCmdSetLineRasterizationModeEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetLineRasterizationModeEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                         static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetLineStippleEnableEXT &&
                         "Function <vkCmdSetLineStippleEnableEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetLineStippleEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( stippledLineEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT &&
                         "Function <vkCmdSetDepthClipNegativeOneToOneEXT> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetDepthClipNegativeOneToOneEXT( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( negativeOneToOne ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportWScalingEnableNV &&
                         "Function <vkCmdSetViewportWScalingEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetViewportWScalingEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( viewportWScalingEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV(
      uint32_t firstViewport,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetViewportSwizzleNV &&
                         "Function <vkCmdSetViewportSwizzleNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetViewportSwizzleNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                  firstViewport,
                                                  viewportSwizzles.size(),
                                                  reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorEnableNV &&
                         "Function <vkCmdSetCoverageToColorEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageToColorEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( coverageToColorEnable ) );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageToColorLocationNV &&
                         "Function <vkCmdSetCoverageToColorLocationNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageToColorLocationNV( static_cast<VkCommandBuffer>( m_commandBuffer ), coverageToColorLocation );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationModeNV &&
                         "Function <vkCmdSetCoverageModulationModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageModulationModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                         static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableEnableNV &&
                         "Function <vkCmdSetCoverageModulationTableEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageModulationTableEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                static_cast<VkBool32>( coverageModulationTableEnable ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageModulationTableNV &&
                         "Function <vkCmdSetCoverageModulationTableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageModulationTableNV(
        static_cast<VkCommandBuffer>( m_commandBuffer ), coverageModulationTable.size(), coverageModulationTable.data() );
    }

    VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetShadingRateImageEnableNV &&
                         "Function <vkCmdSetShadingRateImageEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetShadingRateImageEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ), static_cast<VkBool32>( shadingRateImageEnable ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV &&
                         "Function <vkCmdSetRepresentativeFragmentTestEnableNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetRepresentativeFragmentTestEnableNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                   static_cast<VkBool32>( representativeFragmentTestEnable ) );
    }

    VULKAN_HPP_INLINE void
      CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetCoverageReductionModeNV &&
                         "Function <vkCmdSetCoverageReductionModeNV> requires <VK_EXT_extended_dynamic_state3> or <VK_EXT_shader_object>" );

      getDispatcher()->vkCmdSetCoverageReductionModeNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                        static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
    }
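
    // A minimal usage sketch (not part of the generated API, assuming the default `vk` namespace):
    // recording a few of the VK_EXT_extended_dynamic_state3 setters defined above on a
    // vk::raii::CommandBuffer. The variable `commandBuffer` and the chosen enum values are
    // assumptions for illustration only.
    //
    //   commandBuffer.setProvokingVertexModeEXT( vk::ProvokingVertexModeEXT::eFirstVertex );
    //   commandBuffer.setLineRasterizationModeEXT( vk::LineRasterizationModeEXT::eBresenham );
    //   commandBuffer.setLineStippleEnableEXT( VK_FALSE );
    //   commandBuffer.setDepthClipNegativeOneToOneEXT( VK_FALSE );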

    //=== VK_EXT_shader_module_identifier ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ShaderModule::getIdentifierEXT() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleIdentifierEXT &&
                         "Function <vkGetShaderModuleIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );

      VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
      getDispatcher()->vkGetShaderModuleIdentifierEXT(
        static_cast<VkDevice>( m_device ), static_cast<VkShaderModule>( m_shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );

      return identifier;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
      Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT &&
                         "Function <vkGetShaderModuleCreateInfoIdentifierEXT> requires <VK_EXT_shader_module_identifier>" );

      VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
      getDispatcher()->vkGetShaderModuleCreateInfoIdentifierEXT( static_cast<VkDevice>( m_device ),
                                                                 reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                                                                 reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );

      return identifier;
    }
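
    // A minimal usage sketch (illustration only, not part of the generated header): querying the
    // identifier of an existing vk::raii::ShaderModule, e.g. to key a pipeline cache without
    // shipping the SPIR-V itself. The variable `shaderModule` is an assumption.
    //
    //   vk::ShaderModuleIdentifierEXT id = shaderModule.getIdentifierEXT();
    //   // id.identifierSize bytes of id.identifier uniquely identify the module to this driver.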

    //=== VK_NV_optical_flow ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>
      PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV &&
                         "Function <vkGetPhysicalDeviceOpticalFlowImageFormatsNV> requires <VK_NV_optical_flow>" );

      std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV> imageFormatProperties;
      uint32_t formatCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
          static_cast<VkPhysicalDevice>( m_physicalDevice ),
          reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
          &formatCount,
          nullptr );
        if ( ( result == VK_SUCCESS ) && formatCount )
        {
          imageFormatProperties.resize( formatCount );
          result = getDispatcher()->vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
            static_cast<VkPhysicalDevice>( m_physicalDevice ),
            reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
            &formatCount,
            reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
      VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
      if ( formatCount < imageFormatProperties.size() )
      {
        imageFormatProperties.resize( formatCount );
      }
      return imageFormatProperties;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV
      Device::createOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV const & createInfo,
                                          VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV( *this, createInfo, allocator );
    }

    VULKAN_HPP_INLINE void OpticalFlowSessionNV::bindImage( VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                                                            VULKAN_HPP_NAMESPACE::ImageView view,
                                                            VULKAN_HPP_NAMESPACE::ImageLayout layout ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkBindOpticalFlowSessionImageNV && "Function <vkBindOpticalFlowSessionImageNV> requires <VK_NV_optical_flow>" );

      VkResult result = getDispatcher()->vkBindOpticalFlowSessionImageNV( static_cast<VkDevice>( m_device ),
                                                                          static_cast<VkOpticalFlowSessionNV>( m_session ),
                                                                          static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
                                                                          static_cast<VkImageView>( view ),
                                                                          static_cast<VkImageLayout>( layout ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::OpticalFlowSessionNV::bindImage" );
    }

    VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session,
                                                                const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdOpticalFlowExecuteNV && "Function <vkCmdOpticalFlowExecuteNV> requires <VK_NV_optical_flow>" );

      getDispatcher()->vkCmdOpticalFlowExecuteNV( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                  static_cast<VkOpticalFlowSessionNV>( session ),
                                                  reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
    }
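
    // A minimal usage sketch (illustration only): asking a vk::raii::PhysicalDevice which formats
    // it accepts for optical-flow input images. `physicalDevice` is an assumption, and the usage
    // flag shown is just one plausible choice.
    //
    //   vk::OpticalFlowImageFormatInfoNV formatInfo{};
    //   formatInfo.usage = vk::OpticalFlowUsageFlagBitsNV::eInput;
    //   auto inputFormats = physicalDevice.getOpticalFlowImageFormatsNV( formatInfo );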

    //=== VK_KHR_maintenance5 ===

    VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                               VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                               VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                               VULKAN_HPP_NAMESPACE::IndexType indexType ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindIndexBuffer2KHR && "Function <vkCmdBindIndexBuffer2KHR> requires <VK_KHR_maintenance5>" );

      getDispatcher()->vkCmdBindIndexBuffer2KHR( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                 static_cast<VkBuffer>( buffer ),
                                                 static_cast<VkDeviceSize>( offset ),
                                                 static_cast<VkDeviceSize>( size ),
                                                 static_cast<VkIndexType>( indexType ) );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
      Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetRenderingAreaGranularityKHR && "Function <vkGetRenderingAreaGranularityKHR> requires <VK_KHR_maintenance5>" );

      VULKAN_HPP_NAMESPACE::Extent2D granularity;
      getDispatcher()->vkGetRenderingAreaGranularityKHR( static_cast<VkDevice>( m_device ),
                                                         reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ),
                                                         reinterpret_cast<VkExtent2D *>( &granularity ) );

      return granularity;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
                         "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );

      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

      return layout;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR &&
                         "Function <vkGetDeviceImageSubresourceLayoutKHR> requires <VK_KHR_maintenance5>" );

      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
      getDispatcher()->vkGetDeviceImageSubresourceLayoutKHR( static_cast<VkDevice>( m_device ),
                                                             reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ),
                                                             reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

      return structureChain;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT(
        getDispatcher()->vkGetImageSubresourceLayout2KHR &&
        "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );

      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
                                                        static_cast<VkImage>( m_image ),
                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

      return layout;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Image::getSubresourceLayout2KHR( const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT(
        getDispatcher()->vkGetImageSubresourceLayout2KHR &&
        "Function <vkGetImageSubresourceLayout2KHR> requires <VK_EXT_host_image_copy> or <VK_EXT_image_compression_control> or <VK_KHR_maintenance5>" );

      VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
      getDispatcher()->vkGetImageSubresourceLayout2KHR( static_cast<VkDevice>( m_device ),
                                                        static_cast<VkImage>( m_image ),
                                                        reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                                        reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );

      return structureChain;
    }
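
    // A minimal usage sketch (illustration only): querying the layout of mip level 0, array layer 0
    // of an existing vk::raii::Image. `image` is an assumption.
    //
    //   vk::ImageSubresource2KHR subresource{};
    //   subresource.imageSubresource = vk::ImageSubresource{ vk::ImageAspectFlagBits::eColor, 0, 0 };
    //   vk::SubresourceLayout2KHR layout = image.getSubresourceLayout2KHR( subresource );
    //   // layout.subresourceLayout.offset / .rowPitch describe the placement within the image memory.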

    //=== VK_EXT_shader_object ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_RAII_NAMESPACE::ShaderEXT>
      Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                                VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs( *this, createInfos, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_RAII_NAMESPACE::ShaderEXT
      Device::createShaderEXT( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT const & createInfo,
                               VULKAN_HPP_NAMESPACE::Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator ) const
    {
      return VULKAN_HPP_RAII_NAMESPACE::ShaderEXT( *this, createInfo, allocator );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<uint8_t> ShaderEXT::getBinaryData() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetShaderBinaryDataEXT && "Function <vkGetShaderBinaryDataEXT> requires <VK_EXT_shader_object>" );

      std::vector<uint8_t> data;
      size_t dataSize;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetShaderBinaryDataEXT( static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, nullptr );
        if ( ( result == VK_SUCCESS ) && dataSize )
        {
          data.resize( dataSize );
          result = getDispatcher()->vkGetShaderBinaryDataEXT(
            static_cast<VkDevice>( m_device ), static_cast<VkShaderEXT>( m_shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::ShaderEXT::getBinaryData" );
      VULKAN_HPP_ASSERT( dataSize <= data.size() );
      if ( dataSize < data.size() )
      {
        data.resize( dataSize );
      }
      return data;
    }

    VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
                                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdBindShadersEXT && "Function <vkCmdBindShadersEXT> requires <VK_EXT_shader_object>" );
      if ( stages.size() != shaders.size() )
      {
        throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
      }

      getDispatcher()->vkCmdBindShadersEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                            stages.size(),
                                            reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
                                            reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
    }
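
    // A minimal usage sketch (illustration only): binding a previously created vk::raii::ShaderEXT
    // to the vertex stage of a command buffer. `commandBuffer` and `vertexShader` are assumptions;
    // note that stages and shaders must be passed in matching order and equal counts, or the
    // overload above throws LogicError.
    //
    //   commandBuffer.bindShadersEXT( vk::ShaderStageFlagBits::eVertex, *vertexShader );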

    //=== VK_QCOM_tile_properties ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> Framebuffer::getTilePropertiesQCOM() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetFramebufferTilePropertiesQCOM &&
                         "Function <vkGetFramebufferTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );

      std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM> properties;
      uint32_t propertiesCount;
      VkResult result;
      do
      {
        result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM(
          static_cast<VkDevice>( m_device ), static_cast<VkFramebuffer>( m_framebuffer ), &propertiesCount, nullptr );
        if ( ( result == VK_SUCCESS ) && propertiesCount )
        {
          properties.resize( propertiesCount );
          result = getDispatcher()->vkGetFramebufferTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
                                                                        static_cast<VkFramebuffer>( m_framebuffer ),
                                                                        &propertiesCount,
                                                                        reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );

      VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
      if ( propertiesCount < properties.size() )
      {
        properties.resize( propertiesCount );
      }
      return properties;
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
      Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM &&
                         "Function <vkGetDynamicRenderingTilePropertiesQCOM> requires <VK_QCOM_tile_properties>" );

      VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
      getDispatcher()->vkGetDynamicRenderingTilePropertiesQCOM( static_cast<VkDevice>( m_device ),
                                                                reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ),
                                                                reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );

      return properties;
    }

    //=== VK_NV_low_latency2 ===

    VULKAN_HPP_INLINE void SwapchainKHR::setLatencySleepModeNV( const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencySleepModeNV && "Function <vkSetLatencySleepModeNV> requires <VK_NV_low_latency2>" );

      VkResult result = getDispatcher()->vkSetLatencySleepModeNV(
        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::setLatencySleepModeNV" );
    }

    VULKAN_HPP_INLINE void SwapchainKHR::latencySleepNV( const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkLatencySleepNV && "Function <vkLatencySleepNV> requires <VK_NV_low_latency2>" );

      VkResult result = getDispatcher()->vkLatencySleepNV(
        static_cast<VkDevice>( m_device ), static_cast<VkSwapchainKHR>( m_swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::SwapchainKHR::latencySleepNV" );
    }

    VULKAN_HPP_INLINE void SwapchainKHR::setLatencyMarkerNV( const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkSetLatencyMarkerNV && "Function <vkSetLatencyMarkerNV> requires <VK_NV_low_latency2>" );

      getDispatcher()->vkSetLatencyMarkerNV( static_cast<VkDevice>( m_device ),
                                             static_cast<VkSwapchainKHR>( m_swapchain ),
                                             reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
    }

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint32_t, VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV>
      SwapchainKHR::getLatencyTimingsNV() const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetLatencyTimingsNV && "Function <vkGetLatencyTimingsNV> requires <VK_NV_low_latency2>" );

      std::pair<uint32_t, VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV> data_;
      uint32_t & timingCount = data_.first;
      VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV & latencyMarkerInfo = data_.second;
      getDispatcher()->vkGetLatencyTimingsNV( static_cast<VkDevice>( m_device ),
                                              static_cast<VkSwapchainKHR>( m_swapchain ),
                                              &timingCount,
                                              reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );

      return data_;
    }

    VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkQueueNotifyOutOfBandNV && "Function <vkQueueNotifyOutOfBandNV> requires <VK_NV_low_latency2>" );

      getDispatcher()->vkQueueNotifyOutOfBandNV( static_cast<VkQueue>( m_queue ), reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) );
    }
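
    // A minimal usage sketch (illustration only): enabling low-latency mode on an existing
    // vk::raii::SwapchainKHR. `swapchain` is an assumption, and the field names shown
    // (lowLatencyMode, lowLatencyBoost, minimumIntervalUs) reflect VK_NV_low_latency2 as of recent
    // headers; verify them against your Vulkan SDK before relying on this.
    //
    //   vk::LatencySleepModeInfoNV sleepModeInfo{};
    //   sleepModeInfo.lowLatencyMode    = VK_TRUE;
    //   sleepModeInfo.lowLatencyBoost   = VK_FALSE;
    //   sleepModeInfo.minimumIntervalUs = 0;
    //   swapchain.setLatencySleepModeNV( sleepModeInfo );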

    //=== VK_KHR_cooperative_matrix ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>
      PhysicalDevice::getCooperativeMatrixPropertiesKHR() const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR &&
                         "Function <vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR> requires <VK_KHR_cooperative_matrix>" );

      std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR> properties;
      uint32_t propertyCount;
      VkResult result;
      do
      {
        result =
          getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, nullptr );
        if ( ( result == VK_SUCCESS ) && propertyCount )
        {
          properties.resize( propertyCount );
          result = getDispatcher()->vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
            static_cast<VkPhysicalDevice>( m_physicalDevice ), &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) );
        }
      } while ( result == VK_INCOMPLETE );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
      VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      if ( propertyCount < properties.size() )
      {
        properties.resize( propertyCount );
      }
      return properties;
    }
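
    // A minimal usage sketch (illustration only): enumerating the cooperative-matrix configurations
    // a vk::raii::PhysicalDevice supports. `physicalDevice` is an assumption; MSize/NSize/KSize are
    // the per-configuration matrix dimensions reported by VK_KHR_cooperative_matrix.
    //
    //   for ( auto const & props : physicalDevice.getCooperativeMatrixPropertiesKHR() )
    //   {
    //     // e.g. inspect props.MSize, props.NSize, props.KSize and the component types.
    //   }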

    //=== VK_EXT_attachment_feedback_loop_dynamic_state ===

    VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask ) const VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT &&
                         "Function <vkCmdSetAttachmentFeedbackLoopEnableEXT> requires <VK_EXT_attachment_feedback_loop_dynamic_state>" );

      getDispatcher()->vkCmdSetAttachmentFeedbackLoopEnableEXT( static_cast<VkCommandBuffer>( m_commandBuffer ),
                                                                static_cast<VkImageAspectFlags>( aspectMask ) );
    }

#  if defined( VK_USE_PLATFORM_SCREEN_QNX )
    //=== VK_QNX_external_memory_screen_buffer ===

    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX
      Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX &&
                         "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );

      VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
      VkResult result = getDispatcher()->vkGetScreenBufferPropertiesQNX(
        static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );

      return properties;
    }

    template <typename X, typename Y, typename... Z>
    VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
      Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer ) const
    {
      VULKAN_HPP_ASSERT( getDispatcher()->vkGetScreenBufferPropertiesQNX &&
                         "Function <vkGetScreenBufferPropertiesQNX> requires <VK_QNX_external_memory_screen_buffer>" );

      StructureChain<X, Y, Z...> structureChain;
      VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
      VkResult result = getDispatcher()->vkGetScreenBufferPropertiesQNX(
        static_cast<VkDevice>( m_device ), &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
      resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );

      return structureChain;
    }
#  endif /*VK_USE_PLATFORM_SCREEN_QNX*/

    //====================
    //=== RAII Helpers ===
    //====================

    template <typename RAIIType>
    std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes )
    {
      std::vector<typename RAIIType::CppType> cppTypes( raiiTypes.size() );
      std::transform( raiiTypes.begin(), raiiTypes.end(), cppTypes.begin(), []( RAIIType const & d ) { return *d; } );
      return cppTypes;
    }

    template <typename RAIIType, class UnaryPredicate>
    std::vector<typename RAIIType::CppType> filterCppTypes( std::vector<RAIIType> const & raiiTypes, UnaryPredicate p )
    {
      std::vector<typename RAIIType::CppType> cppTypes;
      for ( auto const & t : raiiTypes )
      {
        if ( p( t ) )
        {
          cppTypes.push_back( *t );
        }
      }
      return cppTypes;
    }
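
    // A minimal usage sketch (illustration only): converting a vector of RAII wrappers into the
    // plain Vulkan-Hpp handles some APIs expect. `swapchainImageViews`, assumed to be a
    // std::vector<vk::raii::ImageView>, is a hypothetical variable.
    //
    //   std::vector<vk::ImageView> views = vk::raii::filterCppTypes( swapchainImageViews );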
  }    // namespace VULKAN_HPP_RAII_NAMESPACE
}  // namespace VULKAN_HPP_NAMESPACE
#endif
#endif