vulkan_funcs.hpp 1.2 MB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
9822198231982419825198261982719828198291983019831198321983319834198351983619837198381983919840198411984219843198441984519846198471984819849198501985119852198531985419855198561985719858198591986019861198621986319864198651986619867198681986919870198711987219873198741987519876198771987819879198801988119882198831988419885198861988719888198891989019891198921989319894198951989619897198981989919900199011990219903199041990519906199071990819909199101991119912199131991419915199161991719918199191992019921199221992319924199251992619927199281992919930199311993219933199341993519936199371993819939199401994119942199431994419945199461994719948199491995019951199521995319954199551995619957199581995919960199611996219963199641996519966199671996819969199701997119972199731997419975199761997719978199791998019981199821998319984199851998619987199881998919990199911999219993199941999519996199971999819999200002000120002200032000420005200062000720008
  1. // Copyright 2015-2022 The Khronos Group Inc.
  2. //
  3. // SPDX-License-Identifier: Apache-2.0 OR MIT
  4. //
  5. // This header is generated from the Khronos Vulkan XML API Registry.
  6. #ifndef VULKAN_FUNCS_HPP
  7. #define VULKAN_FUNCS_HPP
  8. namespace VULKAN_HPP_NAMESPACE
  9. {
  10. //===========================
  11. //=== COMMAND Definitions ===
  12. //===========================
  13. //=== VK_VERSION_1_0 ===
  14. template <typename Dispatch>
  15. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
  16. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17. VULKAN_HPP_NAMESPACE::Instance * pInstance,
  18. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  19. {
  20. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  21. return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
  22. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  23. reinterpret_cast<VkInstance *>( pInstance ) ) );
  24. }
  25. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  26. template <typename Dispatch>
  27. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance(
  28. const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
  29. {
  30. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  31. VULKAN_HPP_NAMESPACE::Instance instance;
  32. VkResult result =
  33. d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
  34. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  35. reinterpret_cast<VkInstance *>( &instance ) );
  36. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
  37. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
  38. }
  39. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  40. template <typename Dispatch>
  41. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique(
  42. const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
  43. {
  44. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  45. VULKAN_HPP_NAMESPACE::Instance instance;
  46. VkResult result =
  47. d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
  48. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  49. reinterpret_cast<VkInstance *>( &instance ) );
  50. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
  51. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  52. UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
  53. }
  54. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  55. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  56. template <typename Dispatch>
  57. VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  58. {
  59. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  60. d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  61. }
  62. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63. template <typename Dispatch>
  64. VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65. {
  66. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  67. d.vkDestroyInstance( m_instance,
  68. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  69. }
  70. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71. template <typename Dispatch>
  72. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
  73. VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
  74. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  75. {
  76. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  77. return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
  78. }
  79. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  80. template <typename PhysicalDeviceAllocator, typename Dispatch>
  81. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
  82. Instance::enumeratePhysicalDevices( Dispatch const & d ) const
  83. {
  84. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  85. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
  86. uint32_t physicalDeviceCount;
  87. VkResult result;
  88. do
  89. {
  90. result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
  91. if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
  92. {
  93. physicalDevices.resize( physicalDeviceCount );
  94. result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
  95. }
  96. } while ( result == VK_INCOMPLETE );
  97. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  98. VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  99. if ( physicalDeviceCount < physicalDevices.size() )
  100. {
  101. physicalDevices.resize( physicalDeviceCount );
  102. }
  103. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
  104. }
  105. template <typename PhysicalDeviceAllocator,
  106. typename Dispatch,
  107. typename B1,
  108. typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
  109. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
  110. Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
  111. {
  112. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  113. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
  114. uint32_t physicalDeviceCount;
  115. VkResult result;
  116. do
  117. {
  118. result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
  119. if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
  120. {
  121. physicalDevices.resize( physicalDeviceCount );
  122. result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
  123. }
  124. } while ( result == VK_INCOMPLETE );
  125. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  126. VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  127. if ( physicalDeviceCount < physicalDevices.size() )
  128. {
  129. physicalDevices.resize( physicalDeviceCount );
  130. }
  131. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
  132. }
  133. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  134. template <typename Dispatch>
  135. VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  136. {
  137. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  138. d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
  139. }
  140. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  141. template <typename Dispatch>
  142. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
  143. PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  144. {
  145. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  146. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
  147. d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
  148. return features;
  149. }
  150. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  151. template <typename Dispatch>
  152. VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  153. VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
  154. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  155. {
  156. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  157. d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
  158. }
  159. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  160. template <typename Dispatch>
  161. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
  162. PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  163. {
  164. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  165. VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
  166. d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
  167. return formatProperties;
  168. }
  169. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  170. template <typename Dispatch>
  171. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  172. VULKAN_HPP_NAMESPACE::ImageType type,
  173. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  174. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  175. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
  176. VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
  177. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  178. {
  179. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  180. return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
  181. static_cast<VkFormat>( format ),
  182. static_cast<VkImageType>( type ),
  183. static_cast<VkImageTiling>( tiling ),
  184. static_cast<VkImageUsageFlags>( usage ),
  185. static_cast<VkImageCreateFlags>( flags ),
  186. reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
  187. }
  188. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  189. template <typename Dispatch>
  190. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
  191. PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  192. VULKAN_HPP_NAMESPACE::ImageType type,
  193. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  194. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  195. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
  196. Dispatch const & d ) const
  197. {
  198. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  199. VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
  200. VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
  201. static_cast<VkFormat>( format ),
  202. static_cast<VkImageType>( type ),
  203. static_cast<VkImageTiling>( tiling ),
  204. static_cast<VkImageUsageFlags>( usage ),
  205. static_cast<VkImageCreateFlags>( flags ),
  206. reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
  207. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
  208. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  209. }
  210. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  211. template <typename Dispatch>
  212. VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
  213. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  214. {
  215. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  216. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
  217. }
  218. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  219. template <typename Dispatch>
  220. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
  221. PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  222. {
  223. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  224. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
  225. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
  226. return properties;
  227. }
  228. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  229. template <typename Dispatch>
  230. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
  231. VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
  232. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  233. {
  234. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  235. d.vkGetPhysicalDeviceQueueFamilyProperties(
  236. m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  237. }
  238. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  239. template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
  240. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
  241. PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
  242. {
  243. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  244. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
  245. uint32_t queueFamilyPropertyCount;
  246. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  247. queueFamilyProperties.resize( queueFamilyPropertyCount );
  248. d.vkGetPhysicalDeviceQueueFamilyProperties(
  249. m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  250. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  251. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  252. {
  253. queueFamilyProperties.resize( queueFamilyPropertyCount );
  254. }
  255. return queueFamilyProperties;
  256. }
  257. template <typename QueueFamilyPropertiesAllocator,
  258. typename Dispatch,
  259. typename B1,
  260. typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
  261. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
  262. PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
  263. {
  264. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  265. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
  266. uint32_t queueFamilyPropertyCount;
  267. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  268. queueFamilyProperties.resize( queueFamilyPropertyCount );
  269. d.vkGetPhysicalDeviceQueueFamilyProperties(
  270. m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  271. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  272. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  273. {
  274. queueFamilyProperties.resize( queueFamilyPropertyCount );
  275. }
  276. return queueFamilyProperties;
  277. }
  278. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  279. template <typename Dispatch>
  280. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
  281. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  282. {
  283. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  284. d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  285. }
  286. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  287. template <typename Dispatch>
  288. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
  289. PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  290. {
  291. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  292. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
  293. d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
  294. return memoryProperties;
  295. }
  296. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  297. template <typename Dispatch>
  298. VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  299. {
  300. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  301. return d.vkGetInstanceProcAddr( m_instance, pName );
  302. }
  303. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  304. template <typename Dispatch>
  305. VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  306. {
  307. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  308. PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
  309. return result;
  310. }
  311. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  312. template <typename Dispatch>
  313. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  314. {
  315. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  316. return d.vkGetDeviceProcAddr( m_device, pName );
  317. }
  318. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  319. template <typename Dispatch>
  320. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  321. {
  322. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  323. PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
  324. return result;
  325. }
  326. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  327. template <typename Dispatch>
  328. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
  329. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  330. VULKAN_HPP_NAMESPACE::Device * pDevice,
  331. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  332. {
  333. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  334. return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
  335. reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
  336. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  337. reinterpret_cast<VkDevice *>( pDevice ) ) );
  338. }
  339. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  340. template <typename Dispatch>
  341. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice(
  342. const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  343. {
  344. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  345. VULKAN_HPP_NAMESPACE::Device device;
  346. VkResult result =
  347. d.vkCreateDevice( m_physicalDevice,
  348. reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
  349. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  350. reinterpret_cast<VkDevice *>( &device ) );
  351. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
  352. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
  353. }
  354. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  355. template <typename Dispatch>
  356. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
  357. PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
  358. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  359. Dispatch const & d ) const
  360. {
  361. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  362. VULKAN_HPP_NAMESPACE::Device device;
  363. VkResult result =
  364. d.vkCreateDevice( m_physicalDevice,
  365. reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
  366. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  367. reinterpret_cast<VkDevice *>( &device ) );
  368. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
  369. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  370. UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
  371. }
  372. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  373. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  374. template <typename Dispatch>
  375. VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  376. {
  377. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  378. d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  379. }
  380. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  381. template <typename Dispatch>
  382. VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  383. {
  384. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  385. d.vkDestroyDevice( m_device,
  386. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  387. }
  388. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  389. template <typename Dispatch>
  390. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName,
  391. uint32_t * pPropertyCount,
  392. VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
  393. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  394. {
  395. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  396. return static_cast<Result>(
  397. d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  398. }
  399. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  400. template <typename ExtensionPropertiesAllocator, typename Dispatch>
  401. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  402. enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
  403. {
  404. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  405. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
  406. uint32_t propertyCount;
  407. VkResult result;
  408. do
  409. {
  410. result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  411. if ( ( result == VK_SUCCESS ) && propertyCount )
  412. {
  413. properties.resize( propertyCount );
  414. result = d.vkEnumerateInstanceExtensionProperties(
  415. layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  416. }
  417. } while ( result == VK_INCOMPLETE );
  418. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  419. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  420. if ( propertyCount < properties.size() )
  421. {
  422. properties.resize( propertyCount );
  423. }
  424. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  425. }
  426. template <typename ExtensionPropertiesAllocator,
  427. typename Dispatch,
  428. typename B1,
  429. typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
  430. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  431. enumerateInstanceExtensionProperties( Optional<const std::string> layerName,
  432. ExtensionPropertiesAllocator & extensionPropertiesAllocator,
  433. Dispatch const & d )
  434. {
  435. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  436. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  437. uint32_t propertyCount;
  438. VkResult result;
  439. do
  440. {
  441. result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  442. if ( ( result == VK_SUCCESS ) && propertyCount )
  443. {
  444. properties.resize( propertyCount );
  445. result = d.vkEnumerateInstanceExtensionProperties(
  446. layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  447. }
  448. } while ( result == VK_INCOMPLETE );
  449. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  450. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  451. if ( propertyCount < properties.size() )
  452. {
  453. properties.resize( propertyCount );
  454. }
  455. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  456. }
  457. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  458. template <typename Dispatch>
  459. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
  460. uint32_t * pPropertyCount,
  461. VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
  462. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  463. {
  464. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  465. return static_cast<Result>(
  466. d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  467. }
  468. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  469. template <typename ExtensionPropertiesAllocator, typename Dispatch>
  470. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  471. PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
  472. {
  473. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  474. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
  475. uint32_t propertyCount;
  476. VkResult result;
  477. do
  478. {
  479. result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  480. if ( ( result == VK_SUCCESS ) && propertyCount )
  481. {
  482. properties.resize( propertyCount );
  483. result = d.vkEnumerateDeviceExtensionProperties(
  484. m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  485. }
  486. } while ( result == VK_INCOMPLETE );
  487. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  488. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  489. if ( propertyCount < properties.size() )
  490. {
  491. properties.resize( propertyCount );
  492. }
  493. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  494. }
  495. template <typename ExtensionPropertiesAllocator,
  496. typename Dispatch,
  497. typename B1,
  498. typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
  499. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  500. PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName,
  501. ExtensionPropertiesAllocator & extensionPropertiesAllocator,
  502. Dispatch const & d ) const
  503. {
  504. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  505. std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  506. uint32_t propertyCount;
  507. VkResult result;
  508. do
  509. {
  510. result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
  511. if ( ( result == VK_SUCCESS ) && propertyCount )
  512. {
  513. properties.resize( propertyCount );
  514. result = d.vkEnumerateDeviceExtensionProperties(
  515. m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
  516. }
  517. } while ( result == VK_INCOMPLETE );
  518. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  519. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  520. if ( propertyCount < properties.size() )
  521. {
  522. properties.resize( propertyCount );
  523. }
  524. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  525. }
  526. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  527. template <typename Dispatch>
  528. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
  529. VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
  530. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  531. {
  532. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  533. return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  534. }
  535. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  536. template <typename LayerPropertiesAllocator, typename Dispatch>
  537. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  538. enumerateInstanceLayerProperties( Dispatch const & d )
  539. {
  540. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  541. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
  542. uint32_t propertyCount;
  543. VkResult result;
  544. do
  545. {
  546. result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
  547. if ( ( result == VK_SUCCESS ) && propertyCount )
  548. {
  549. properties.resize( propertyCount );
  550. result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  551. }
  552. } while ( result == VK_INCOMPLETE );
  553. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  554. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  555. if ( propertyCount < properties.size() )
  556. {
  557. properties.resize( propertyCount );
  558. }
  559. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  560. }
  561. template <typename LayerPropertiesAllocator,
  562. typename Dispatch,
  563. typename B1,
  564. typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
  565. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  566. enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
  567. {
  568. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  569. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  570. uint32_t propertyCount;
  571. VkResult result;
  572. do
  573. {
  574. result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
  575. if ( ( result == VK_SUCCESS ) && propertyCount )
  576. {
  577. properties.resize( propertyCount );
  578. result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  579. }
  580. } while ( result == VK_INCOMPLETE );
  581. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  582. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  583. if ( propertyCount < properties.size() )
  584. {
  585. properties.resize( propertyCount );
  586. }
  587. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  588. }
  589. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  590. template <typename Dispatch>
  591. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
  592. VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
  593. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  594. {
  595. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  596. return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  597. }
  598. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  599. template <typename LayerPropertiesAllocator, typename Dispatch>
  600. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  601. PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
  602. {
  603. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  604. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
  605. uint32_t propertyCount;
  606. VkResult result;
  607. do
  608. {
  609. result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
  610. if ( ( result == VK_SUCCESS ) && propertyCount )
  611. {
  612. properties.resize( propertyCount );
  613. result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  614. }
  615. } while ( result == VK_INCOMPLETE );
  616. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  617. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  618. if ( propertyCount < properties.size() )
  619. {
  620. properties.resize( propertyCount );
  621. }
  622. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  623. }
  624. template <typename LayerPropertiesAllocator,
  625. typename Dispatch,
  626. typename B1,
  627. typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
  628. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  629. PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
  630. {
  631. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  632. std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  633. uint32_t propertyCount;
  634. VkResult result;
  635. do
  636. {
  637. result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
  638. if ( ( result == VK_SUCCESS ) && propertyCount )
  639. {
  640. properties.resize( propertyCount );
  641. result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
  642. }
  643. } while ( result == VK_INCOMPLETE );
  644. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  645. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  646. if ( propertyCount < properties.size() )
  647. {
  648. properties.resize( propertyCount );
  649. }
  650. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  651. }
  652. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  653. template <typename Dispatch>
  654. VULKAN_HPP_INLINE void
  655. Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  656. {
  657. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  658. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  659. }
  660. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  661. template <typename Dispatch>
  662. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
  663. Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  664. {
  665. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  666. VULKAN_HPP_NAMESPACE::Queue queue;
  667. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
  668. return queue;
  669. }
  670. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  671. template <typename Dispatch>
  672. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
  673. const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
  674. VULKAN_HPP_NAMESPACE::Fence fence,
  675. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  676. {
  677. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  678. return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  679. }
  680. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  681. template <typename Dispatch>
  682. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  683. Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  684. {
  685. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  686. VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
  687. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
  688. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  689. }
  690. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  691. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  692. template <typename Dispatch>
  693. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  694. {
  695. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  696. return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  697. }
  698. #else
  699. template <typename Dispatch>
  700. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  701. {
  702. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  703. VkResult result = d.vkQueueWaitIdle( m_queue );
  704. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
  705. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  706. }
  707. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  708. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  709. template <typename Dispatch>
  710. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  711. {
  712. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  713. return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  714. }
  715. #else
  716. template <typename Dispatch>
  717. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  718. {
  719. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  720. VkResult result = d.vkDeviceWaitIdle( m_device );
  721. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  722. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  723. }
  724. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  725. template <typename Dispatch>
  726. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
  727. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  728. VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
  729. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  730. {
  731. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  732. return static_cast<Result>( d.vkAllocateMemory( m_device,
  733. reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
  734. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  735. reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  736. }
  737. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  738. template <typename Dispatch>
  739. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
  740. Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
  741. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  742. Dispatch const & d ) const
  743. {
  744. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  745. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  746. VkResult result =
  747. d.vkAllocateMemory( m_device,
  748. reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
  749. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  750. reinterpret_cast<VkDeviceMemory *>( &memory ) );
  751. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
  752. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
  753. }
  754. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  755. template <typename Dispatch>
  756. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
  757. Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
  758. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  759. Dispatch const & d ) const
  760. {
  761. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  762. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  763. VkResult result =
  764. d.vkAllocateMemory( m_device,
  765. reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
  766. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  767. reinterpret_cast<VkDeviceMemory *>( &memory ) );
  768. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
  769. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  770. UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
  771. }
  772. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  773. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  774. template <typename Dispatch>
  775. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  776. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  777. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  778. {
  779. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  780. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  781. }
  782. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  783. template <typename Dispatch>
  784. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  785. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  786. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  787. {
  788. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  789. d.vkFreeMemory( m_device,
  790. static_cast<VkDeviceMemory>( memory ),
  791. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  792. }
  793. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  794. template <typename Dispatch>
  795. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  796. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  797. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  798. {
  799. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  800. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  801. }
  802. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  803. template <typename Dispatch>
  804. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  805. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  806. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  807. {
  808. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  809. d.vkFreeMemory( m_device,
  810. static_cast<VkDeviceMemory>( memory ),
  811. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  812. }
  813. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  814. template <typename Dispatch>
  815. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  816. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  817. VULKAN_HPP_NAMESPACE::DeviceSize size,
  818. VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
  819. void ** ppData,
  820. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  821. {
  822. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  823. return static_cast<Result>( d.vkMapMemory( m_device,
  824. static_cast<VkDeviceMemory>( memory ),
  825. static_cast<VkDeviceSize>( offset ),
  826. static_cast<VkDeviceSize>( size ),
  827. static_cast<VkMemoryMapFlags>( flags ),
  828. ppData ) );
  829. }
  830. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  831. template <typename Dispatch>
  832. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  833. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  834. VULKAN_HPP_NAMESPACE::DeviceSize size,
  835. VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
  836. Dispatch const & d ) const
  837. {
  838. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  839. void * pData;
  840. VkResult result = d.vkMapMemory( m_device,
  841. static_cast<VkDeviceMemory>( memory ),
  842. static_cast<VkDeviceSize>( offset ),
  843. static_cast<VkDeviceSize>( size ),
  844. static_cast<VkMemoryMapFlags>( flags ),
  845. &pData );
  846. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
  847. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
  848. }
  849. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  850. template <typename Dispatch>
  851. VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  852. {
  853. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  854. d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  855. }
  856. template <typename Dispatch>
  857. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
  858. const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
  859. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  860. {
  861. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  862. return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  863. }
  864. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  865. template <typename Dispatch>
  866. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  867. Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  868. {
  869. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  870. VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  871. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  872. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  873. }
  874. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  875. template <typename Dispatch>
  876. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
  877. const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
  878. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  879. {
  880. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  881. return static_cast<Result>(
  882. d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  883. }
  884. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  885. template <typename Dispatch>
  886. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  887. Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  888. {
  889. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  890. VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  891. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
  892. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  893. }
  894. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  895. template <typename Dispatch>
  896. VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  897. VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
  898. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  899. {
  900. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  901. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
  902. }
  903. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  904. template <typename Dispatch>
  905. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  906. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  907. {
  908. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  909. VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
  910. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
  911. return committedMemoryInBytes;
  912. }
  913. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  914. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  915. template <typename Dispatch>
  916. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
  917. VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  918. VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
  919. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  920. {
  921. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  922. return static_cast<Result>(
  923. d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  924. }
  925. #else
  926. template <typename Dispatch>
  927. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory(
  928. VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  929. {
  930. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  931. VkResult result =
  932. d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
  933. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
  934. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  935. }
  936. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  937. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  938. template <typename Dispatch>
  939. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
  940. VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  941. VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
  942. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  943. {
  944. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  945. return static_cast<Result>(
  946. d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  947. }
  948. #else
  949. template <typename Dispatch>
  950. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory(
  951. VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  952. {
  953. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  954. VkResult result =
  955. d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
  956. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
  957. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  958. }
  959. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  960. template <typename Dispatch>
  961. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
  962. VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
  963. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  964. {
  965. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  966. d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  967. }
  968. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  969. template <typename Dispatch>
  970. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
  971. Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  972. {
  973. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  974. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  975. d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  976. return memoryRequirements;
  977. }
  978. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  979. template <typename Dispatch>
  980. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
  981. VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
  982. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  983. {
  984. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  985. d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  986. }
  987. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  988. template <typename Dispatch>
  989. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
  990. Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  991. {
  992. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  993. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  994. d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  995. return memoryRequirements;
  996. }
  997. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  998. template <typename Dispatch>
  999. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
  1000. uint32_t * pSparseMemoryRequirementCount,
  1001. VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
  1002. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1003. {
  1004. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1005. d.vkGetImageSparseMemoryRequirements( m_device,
  1006. static_cast<VkImage>( image ),
  1007. pSparseMemoryRequirementCount,
  1008. reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
  1009. }
  1010. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: collect the sparse memory requirements of an image into a vector,
// using the standard Vulkan two-call enumeration protocol (count query, then fetch).
template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
// First call (null data pointer) yields only the element count.
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
// Second call fills the resized vector.
d.vkGetImageSparseMemoryRequirements( m_device,
static_cast<VkImage>( image ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
// Shrink if the second call reported fewer elements than the first.
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
// Allocator-aware overload of getImageSparseMemoryRequirements: identical two-call
// enumeration, but the result vector is constructed with the caller-supplied allocator.
template <typename SparseImageMemoryRequirementsAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
sparseImageMemoryRequirementsAllocator );
uint32_t sparseMemoryRequirementCount;
// First call (null data pointer) yields only the element count.
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
// Second call fills the resized vector.
d.vkGetImageSparseMemoryRequirements( m_device,
static_cast<VkImage>( image ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
// Shrink if the second call reported fewer elements than the first.
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
  1057. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  1058. template <typename Dispatch>
  1059. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  1060. VULKAN_HPP_NAMESPACE::ImageType type,
  1061. VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
  1062. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  1063. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  1064. uint32_t * pPropertyCount,
  1065. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
  1066. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1067. {
  1068. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1069. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
  1070. static_cast<VkFormat>( format ),
  1071. static_cast<VkImageType>( type ),
  1072. static_cast<VkSampleCountFlagBits>( samples ),
  1073. static_cast<VkImageUsageFlags>( usage ),
  1074. static_cast<VkImageTiling>( tiling ),
  1075. pPropertyCount,
  1076. reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
  1077. }
  1078. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: collect the sparse-image format properties matching the given
// format/type/samples/usage/tiling combination into a vector, using the standard
// Vulkan two-call enumeration protocol (count query, then fetch).
template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
uint32_t propertyCount;
// First call (null data pointer) yields only the element count.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
// Second call fills the resized vector.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
// Shrink if the second call reported fewer elements than the first.
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
// Allocator-aware overload of getSparseImageFormatProperties: identical two-call
// enumeration, but the result vector is constructed with the caller-supplied allocator.
template <typename SparseImageFormatPropertiesAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
uint32_t propertyCount;
// First call (null data pointer) yields only the element count.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
// Second call fills the resized vector.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
// Shrink if the second call reported fewer elements than the first.
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
  1155. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  1156. template <typename Dispatch>
  1157. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount,
  1158. const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
  1159. VULKAN_HPP_NAMESPACE::Fence fence,
  1160. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1161. {
  1162. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1163. return static_cast<Result>(
  1164. d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
  1165. }
  1166. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  1167. template <typename Dispatch>
  1168. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  1169. Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  1170. {
  1171. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1172. VkResult result =
  1173. d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
  1174. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
  1175. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  1176. }
  1177. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: forwards the raw pointers straight to vkCreateFence and returns
  // the Result unchecked (noexcept; caller is responsible for inspecting it).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                     VULKAN_HPP_NAMESPACE::Fence * pFence,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateFence( m_device,
                                                 reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkFence *>( pFence ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the create-info by reference and an optional allocator,
  // checks the result via resultCheck, and returns the new Fence handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence(
    const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result =
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: same as createFence, but wraps the handle in a UniqueHandle
  // whose ObjectDestroy deleter destroys it on this device with the given allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique(
    const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Fence fence;
    VkResult result =
      d.vkCreateFence( m_device,
                       reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkFence *>( &fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: destroys the fence via vkDestroyFence with a raw allocator pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same destruction, with an Optional allocator argument.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy(Fence) overload — identical to destroyFence; exists so destroy() can be
  // called uniformly for any handle type (used e.g. by UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy(Fence) overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyFence( m_device,
                      static_cast<VkFence>( fence ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: resets an array of fences given count + raw pointer; returns the
  // Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
                                                                     const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: accepts any contiguous range of fences via ArrayProxy and
  // routes the result through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                                                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: returns the raw fence status Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  }
#else
  // Enhanced build: both eSuccess (signaled) and eNotReady (unsignaled) are treated as
  // valid outcomes by resultCheck; the Result itself is returned so the caller can tell
  // the two states apart.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: waits (up to `timeout` ns) for any/all of the given fences;
  // returns the Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
                                                                       const VULKAN_HPP_NAMESPACE::Fence * pFences,
                                                                       VULKAN_HPP_NAMESPACE::Bool32 waitAll,
                                                                       uint64_t timeout,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: eSuccess and eTimeout are both accepted by resultCheck; the
  // Result is returned so the caller can distinguish a timeout from completion.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
                                                                                             VULKAN_HPP_NAMESPACE::Bool32 waitAll,
                                                                                             uint64_t timeout,
                                                                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result =
      d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: forwards raw pointers to vkCreateSemaphore; Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
                                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                         VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateSemaphore( m_device,
                                                     reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: checks the result via resultCheck and returns the Semaphore.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
    Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    VkResult result =
      d.vkCreateSemaphore( m_device,
                           reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkSemaphore *>( &semaphore ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps the new Semaphore in a UniqueHandle with an
  // ObjectDestroy deleter bound to this device and the given allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
    Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
    VkResult result =
      d.vkCreateSemaphore( m_device,
                           reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkSemaphore *>( &semaphore ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: destroys the semaphore via vkDestroySemaphore.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device,
                          static_cast<VkSemaphore>( semaphore ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy(Semaphore) overload — identical to destroySemaphore, for uniform
  // destroy() dispatch (e.g. from UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy(Semaphore) overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySemaphore( m_device,
                          static_cast<VkSemaphore>( semaphore ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: forwards raw pointers to vkCreateEvent; Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                     VULKAN_HPP_NAMESPACE::Event * pEvent,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateEvent( m_device,
                                                 reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkEvent *>( pEvent ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: checks the result via resultCheck and returns the Event.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent(
    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Event event;
    VkResult result =
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps the new Event in a UniqueHandle with an ObjectDestroy
  // deleter bound to this device and the given allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique(
    const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Event event;
    VkResult result =
      d.vkCreateEvent( m_device,
                       reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkEvent *>( &event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: destroys the event via vkDestroyEvent.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy(Event) overload — identical to destroyEvent, for uniform destroy()
  // dispatch (e.g. from UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy(Event) overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyEvent( m_device,
                      static_cast<VkEvent>( event ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: returns the raw event status Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced build: eEventSet and eEventReset are both accepted by resultCheck; the Result
  // is returned so the caller can distinguish the two states.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
                 { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: sets the event; Result returned unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced build: sets the event and routes the result through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced build: resets the event; Result returned unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  }
#else
  // Enhanced build: resets the event and routes the result through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: forwards raw pointers to vkCreateQueryPool; Result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
                                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                         VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateQueryPool( m_device,
                                                     reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: checks the result via resultCheck and returns the QueryPool.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
    Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VkResult result =
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: wraps the new QueryPool in a UniqueHandle with an
  // ObjectDestroy deleter bound to this device and the given allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
    Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
    VkResult result =
      d.vkCreateQueryPool( m_device,
                           reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkQueryPool *>( &queryPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: destroys the query pool via vkDestroyQueryPool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy(QueryPool) overload — identical to destroyQueryPool, for uniform
  // destroy() dispatch (e.g. from UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy(QueryPool) overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyQueryPool( m_device,
                          static_cast<VkQueryPool>( queryPool ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-API style overload: copies up to dataSize bytes of query results into pData;
  // Result returned unchecked (may be VK_NOT_READY — see the Vulkan spec for
  // vkGetQueryPoolResults).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                                             uint32_t firstQuery,
                                                                             uint32_t queryCount,
                                                                             size_t dataSize,
                                                                             void * pData,
                                                                             VULKAN_HPP_NAMESPACE::DeviceSize stride,
                                                                             VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                         static_cast<VkQueryPool>( queryPool ),
                                                         firstQuery,
                                                         queryCount,
                                                         dataSize,
                                                         pData,
                                                         static_cast<VkDeviceSize>( stride ),
                                                         static_cast<VkQueryResultFlags>( flags ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: fills a vector of dataSize / sizeof(DataType) elements
  // (dataSize must be a multiple of sizeof(DataType) — asserted). eSuccess and eNotReady
  // are both accepted by resultCheck; the Result is returned alongside the data.
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>>
    Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                 uint32_t firstQuery,
                                 uint32_t queryCount,
                                 size_t dataSize,
                                 VULKAN_HPP_NAMESPACE::DeviceSize stride,
                                 VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult result = d.vkGetQueryPoolResults( m_device,
                                               static_cast<VkQueryPool>( queryPool ),
                                               firstQuery,
                                               queryCount,
                                               data.size() * sizeof( DataType ),
                                               reinterpret_cast<void *>( data.data() ),
                                               static_cast<VkDeviceSize>( stride ),
                                               static_cast<VkQueryResultFlags>( flags ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  // Single-value variant: retrieves exactly one DataType worth of result data.
  // eSuccess and eNotReady are both accepted by resultCheck.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                                                           uint32_t firstQuery,
                                                                                           uint32_t queryCount,
                                                                                           VULKAN_HPP_NAMESPACE::DeviceSize stride,
                                                                                           VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetQueryPoolResults( m_device,
                                               static_cast<VkQueryPool>( queryPool ),
                                               firstQuery,
                                               queryCount,
                                               sizeof( DataType ),
                                               reinterpret_cast<void *>( &data ),
                                               static_cast<VkDeviceSize>( stride ),
                                               static_cast<VkQueryResultFlags>( flags ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: reinterpret_casts the C++ wrapper structs to their Vk C
  // equivalents, forwards through the dispatcher, and returns the raw Result untouched.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo *    pCreateInfo,
                                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                      VULKAN_HPP_NAMESPACE::Buffer *                    pBuffer,
                                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBuffer( m_device,
                                                  reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                  reinterpret_cast<VkBuffer *>( pBuffer ) ) );
  }
  1730. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a Buffer into a local handle, routing the optional
  // allocator through (Optional<> yields nullptr when absent), and runs resultCheck
  // on the VkResult before wrapping handle + result via createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer(
    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VkResult result =
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
  }
  1745. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Same as createBuffer (enhanced), but wraps the new handle in a UniqueHandle whose
  // ObjectDestroy deleter destroys it with this device and the same allocator/dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique(
    const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Buffer buffer;
    VkResult result =
      d.vkCreateBuffer( m_device,
                        reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkBuffer *>( &buffer ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  1761. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  1762. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the buffer; pAllocator is forwarded as-is
  // (null selects the default allocator per the Vulkan C API).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1771. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator converts to a (possibly null) pointer
  // before the cast to VkAllocationCallbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1782. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy overload for Buffer (same body as destroyBuffer); this uniform
  // destroy( handle, allocator, dispatch ) form is the one ObjectDestroy-based
  // unique handles are expected to call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1791. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for Buffer with an Optional<> allocator; identical in
  // effect to destroyBuffer( buffer, allocator, d ).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBuffer( m_device,
                       static_cast<VkBuffer>( buffer ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1802. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: forwards the create-info/allocator/out-handle to
  // vkCreateBufferView via reinterpret_cast and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                                                                          VULKAN_HPP_NAMESPACE::BufferView *                 pView,
                                                                          Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBufferView( m_device,
                                                      reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkBufferView *>( pView ) ) );
  }
  1815. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a BufferView into a local handle, checks the VkResult
  // via resultCheck, and returns the handle through createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
    Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferView view;
    VkResult result =
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
  }
  1832. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: as createBufferView, but the returned UniqueHandle owns the
  // view and destroys it via ObjectDestroy with this device, allocator, and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
    Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferView view;
    VkResult result =
      d.vkCreateBufferView( m_device,
                            reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkBufferView *>( &view ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  1850. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  1851. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the buffer view; null pAllocator selects the default
  // allocator per the Vulkan C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1860. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the Optional<> allocator decays to a possibly-null pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                    Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1871. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy overload for BufferView (same body as destroyBufferView), matching
  // the uniform destroy( handle, allocator, dispatch ) shape used by unique handles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1880. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for BufferView with an Optional<> allocator; identical in
  // effect to destroyBufferView( bufferView, allocator, d ).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferView( m_device,
                           static_cast<VkBufferView>( bufferView ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1891. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: forwards to vkCreateImage with reinterpret_cast conversions and
  // returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo *     pCreateInfo,
                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                     VULKAN_HPP_NAMESPACE::Image *                     pImage,
                                                                     Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateImage( m_device,
                                                 reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                 reinterpret_cast<VkImage *>( pImage ) ) );
  }
  1904. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates an Image into a local handle, checks the VkResult via
  // resultCheck, and returns the handle through createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage(
    const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Image image;
    VkResult result =
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
  }
  1919. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: as createImage, but the returned UniqueHandle owns the image
  // and destroys it via ObjectDestroy with this device, allocator, and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique(
    const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Image image;
    VkResult result =
      d.vkCreateImage( m_device,
                       reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                       reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                       reinterpret_cast<VkImage *>( &image ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  1935. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  1936. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the image; null pAllocator selects the default
  // allocator per the Vulkan C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                       image,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                               Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1945. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the Optional<> allocator decays to a possibly-null pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                               image,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device,
                      static_cast<VkImage>( image ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1956. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy overload for Image (same body as destroyImage), matching the
  // uniform destroy( handle, allocator, dispatch ) shape used by unique handles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                       image,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  1965. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for Image with an Optional<> allocator; identical in
  // effect to destroyImage( image, allocator, d ).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                               image,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImage( m_device,
                      static_cast<VkImage>( image ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  1976. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: queries the subresource layout into *pLayout; the underlying
  // C call returns void, so there is no result to check.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image                    image,
                                                            const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
                                                            VULKAN_HPP_NAMESPACE::SubresourceLayout *      pLayout,
                                                            Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSubresourceLayout( m_device,
                                   static_cast<VkImage>( image ),
                                   reinterpret_cast<const VkImageSubresource *>( pSubresource ),
                                   reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
  }
  1989. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: fills a local SubresourceLayout and returns it by value
  // (infallible — the C entry point has no VkResult).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
    d.vkGetImageSubresourceLayout( m_device,
                                   static_cast<VkImage>( image ),
                                   reinterpret_cast<const VkImageSubresource *>( &subresource ),
                                   reinterpret_cast<VkSubresourceLayout *>( &layout ) );
    return layout;
  }
  2002. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: forwards to vkCreateImageView with reinterpret_cast conversions
  // and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
                                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                         VULKAN_HPP_NAMESPACE::ImageView *                 pView,
                                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateImageView( m_device,
                                                     reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkImageView *>( pView ) ) );
  }
  2015. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates an ImageView into a local handle, checks the VkResult
  // via resultCheck, and returns the handle through createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
    Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageView view;
    VkResult result =
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
  }
  2032. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: as createImageView, but the returned UniqueHandle owns the
  // view and destroys it via ObjectDestroy with this device, allocator, and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
    Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo &         createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageView view;
    VkResult result =
      d.vkCreateImageView( m_device,
                           reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkImageView *>( &view ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2050. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2051. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the image view; null pAllocator selects the default
  // allocator per the Vulkan C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  2060. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the Optional<> allocator decays to a possibly-null pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  2071. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy overload for ImageView (same body as destroyImageView), matching
  // the uniform destroy( handle, allocator, dispatch ) shape used by unique handles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                   imageView,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  2080. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for ImageView with an Optional<> allocator; identical in
  // effect to destroyImageView( imageView, allocator, d ).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView                           imageView,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyImageView( m_device,
                          static_cast<VkImageView>( imageView ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  2091. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: forwards to vkCreateShaderModule with reinterpret_cast
  // conversions and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *    pAllocator,
                                                                            VULKAN_HPP_NAMESPACE::ShaderModule *                 pShaderModule,
                                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateShaderModule( m_device,
                                                        reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
  }
  2104. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a ShaderModule into a local handle, checks the VkResult
  // via resultCheck, and returns the handle through createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
    Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VkResult result =
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
  }
  2121. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: as createShaderModule, but the returned UniqueHandle owns
  // the module and destroys it via ObjectDestroy with this device/allocator/dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
    Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
    VkResult result =
      d.vkCreateShaderModule( m_device,
                              reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkShaderModule *>( &shaderModule ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2140. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2141. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the shader module; null pAllocator selects the default
  // allocator per the Vulkan C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                                      const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                      Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  2150. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the Optional<> allocator decays to a possibly-null pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  2161. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy overload for ShaderModule (same body as destroyShaderModule),
  // matching the uniform destroy( handle, allocator, dispatch ) shape.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  2170. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload for ShaderModule with an Optional<> allocator; identical
  // in effect to destroyShaderModule( shaderModule, allocator, d ).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule                        shaderModule,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyShaderModule( m_device,
                             static_cast<VkShaderModule>( shaderModule ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  2181. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: forwards to vkCreatePipelineCache with reinterpret_cast
  // conversions and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::PipelineCache *                 pPipelineCache,
                                                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePipelineCache( m_device,
                                                         reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
  }
  2194. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates a PipelineCache into a local handle, checks the VkResult
  // via resultCheck, and returns the handle through createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
    Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VkResult result =
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
  }
  2211. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: as createPipelineCache, but the returned UniqueHandle owns
  // the cache and destroys it via ObjectDestroy with this device/allocator/dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
    Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo &     createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
    VkResult result =
      d.vkCreatePipelineCache( m_device,
                               reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2230. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2231. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2232. template <typename Dispatch>
  2233. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2234. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2235. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2236. {
  2237. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2238. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2239. }
  2240. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2241. template <typename Dispatch>
  2242. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2243. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2244. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2245. {
  2246. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2247. d.vkDestroyPipelineCache( m_device,
  2248. static_cast<VkPipelineCache>( pipelineCache ),
  2249. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2250. }
  2251. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2252. template <typename Dispatch>
  2253. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2254. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2255. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2256. {
  2257. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2258. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2259. }
  2260. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2261. template <typename Dispatch>
  2262. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2263. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2264. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2265. {
  2266. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2267. d.vkDestroyPipelineCache( m_device,
  2268. static_cast<VkPipelineCache>( pipelineCache ),
  2269. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2270. }
  2271. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2272. template <typename Dispatch>
  2273. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2274. size_t * pDataSize,
  2275. void * pData,
  2276. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2277. {
  2278. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2279. return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  2280. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the whole pipeline-cache blob as a std::vector<uint8_t>.
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t                                 dataSize;
    VkResult                               result;
    // Standard two-call enumeration: query the size, resize, then fetch. If the blob grew between
    // the two calls the fetch returns VK_INCOMPLETE and the whole sequence is retried.
    do
    {
      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
      if ( ( result == VK_SUCCESS ) && dataSize )
      {
        data.resize( dataSize );
        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // The blob may have shrunk between the two calls; trim the vector to the actual size written.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  // Same as the overload above, but constructs the result vector with a caller-supplied allocator.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t                                 dataSize;
    VkResult                               result;
    // Two-call enumeration with VK_INCOMPLETE retry; see the comment on the non-allocator overload.
    do
    {
      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
      if ( ( result == VK_SUCCESS ) && dataSize )
      {
        data.resize( dataSize );
        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // Trim any excess capacity if the blob shrank between the size query and the fetch.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2336. template <typename Dispatch>
  2337. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
  2338. uint32_t srcCacheCount,
  2339. const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
  2340. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2341. {
  2342. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2343. return static_cast<Result>(
  2344. d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  2345. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the source caches as an ArrayProxy and checks the result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches(
    VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkMergePipelineCaches(
      m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2358. template <typename Dispatch>
  2359. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2360. uint32_t createInfoCount,
  2361. const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
  2362. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2363. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  2364. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2365. {
  2366. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2367. return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
  2368. static_cast<VkPipelineCache>( pipelineCache ),
  2369. createInfoCount,
  2370. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
  2371. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2372. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  2373. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates one pipeline per create info and returns them in a vector.
  // Returns ResultValue (not ResultValueType) because ePipelineCompileRequiredEXT is an accepted,
  // non-fatal success code the caller may want to inspect.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Same as the overload above, but constructs the result vector with a caller-supplied allocator.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     PipelineAllocator & pipelineAllocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Convenience single-pipeline variant: calls vkCreateGraphicsPipelines with a count of 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                    const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: creates the pipelines, then wraps each one in a UniqueHandle sharing a
  // single ObjectDestroy deleter (same device, allocator and dispatcher).
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Same as the overload above, but constructs the result vector with a caller-supplied allocator
  // for the UniqueHandle elements.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           PipelineAllocator & pipelineAllocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    // One shared deleter wraps every returned pipeline.
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Single-pipeline unique-handle variant: one create info, returns one UniqueHandle<Pipeline>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2530. template <typename Dispatch>
  2531. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2532. uint32_t createInfoCount,
  2533. const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
  2534. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2535. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  2536. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2537. {
  2538. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2539. return static_cast<Result>( d.vkCreateComputePipelines( m_device,
  2540. static_cast<VkPipelineCache>( pipelineCache ),
  2541. createInfoCount,
  2542. reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
  2543. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2544. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  2545. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates one compute pipeline per create info and returns them in a vector.
  // Returns ResultValue because ePipelineCompileRequiredEXT is an accepted, non-fatal success code.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Same as the overload above, but constructs the result vector with a caller-supplied allocator.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    PipelineAllocator & pipelineAllocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Convenience single-pipeline variant: calls vkCreateComputePipelines with a count of 1.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                   const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: creates the compute pipelines, then wraps each one in a UniqueHandle
  // sharing a single ObjectDestroy deleter (same device, allocator and dispatcher).
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Same as the overload above, but constructs the result vector with a caller-supplied allocator
  // for the UniqueHandle elements.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          PipelineAllocator & pipelineAllocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    // One shared deleter wraps every returned pipeline.
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Single-pipeline unique-handle variant: one create info, returns one UniqueHandle<Pipeline>.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a non-fatal success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2702. template <typename Dispatch>
  2703. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2704. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2705. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2706. {
  2707. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2708. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2709. }
  2710. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2711. template <typename Dispatch>
  2712. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2713. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2714. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2715. {
  2716. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2717. d.vkDestroyPipeline( m_device,
  2718. static_cast<VkPipeline>( pipeline ),
  2719. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2720. }
  2721. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2722. template <typename Dispatch>
  2723. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2724. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2725. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2726. {
  2727. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2728. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2729. }
  2730. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2731. template <typename Dispatch>
  2732. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2733. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2734. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2735. {
  2736. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2737. d.vkDestroyPipeline( m_device,
  2738. static_cast<VkPipeline>( pipeline ),
  2739. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2740. }
  2741. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2742. template <typename Dispatch>
  2743. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
  2744. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2745. VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
  2746. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2747. {
  2748. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2749. return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
  2750. reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
  2751. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2752. reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
  2753. }
  2754. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2755. template <typename Dispatch>
  2756. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
  2757. Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
  2758. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2759. Dispatch const & d ) const
  2760. {
  2761. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2762. VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
  2763. VkResult result =
  2764. d.vkCreatePipelineLayout( m_device,
  2765. reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
  2766. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2767. reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
  2768. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
  2769. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
  2770. }
  2771. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2772. template <typename Dispatch>
  2773. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
  2774. Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
  2775. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2776. Dispatch const & d ) const
  2777. {
  2778. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2779. VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
  2780. VkResult result =
  2781. d.vkCreatePipelineLayout( m_device,
  2782. reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
  2783. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2784. reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
  2785. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
  2786. return createResultValueType(
  2787. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2788. UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2789. }
  2790. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2791. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2792. template <typename Dispatch>
  2793. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2794. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2795. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2796. {
  2797. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2798. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2799. }
  2800. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2801. template <typename Dispatch>
  2802. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2803. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2804. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2805. {
  2806. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2807. d.vkDestroyPipelineLayout( m_device,
  2808. static_cast<VkPipelineLayout>( pipelineLayout ),
  2809. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2810. }
  2811. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2812. template <typename Dispatch>
  2813. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2814. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2815. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2816. {
  2817. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2818. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2819. }
  2820. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2821. template <typename Dispatch>
  2822. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2823. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2824. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2825. {
  2826. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2827. d.vkDestroyPipelineLayout( m_device,
  2828. static_cast<VkPipelineLayout>( pipelineLayout ),
  2829. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2830. }
  2831. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2832. template <typename Dispatch>
  2833. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
  2834. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2835. VULKAN_HPP_NAMESPACE::Sampler * pSampler,
  2836. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2837. {
  2838. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2839. return static_cast<Result>( d.vkCreateSampler( m_device,
  2840. reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
  2841. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2842. reinterpret_cast<VkSampler *>( pSampler ) ) );
  2843. }
  2844. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2845. template <typename Dispatch>
  2846. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler(
  2847. const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  2848. {
  2849. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2850. VULKAN_HPP_NAMESPACE::Sampler sampler;
  2851. VkResult result =
  2852. d.vkCreateSampler( m_device,
  2853. reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
  2854. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2855. reinterpret_cast<VkSampler *>( &sampler ) );
  2856. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
  2857. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
  2858. }
  2859. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2860. template <typename Dispatch>
  2861. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique(
  2862. const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  2863. {
  2864. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2865. VULKAN_HPP_NAMESPACE::Sampler sampler;
  2866. VkResult result =
  2867. d.vkCreateSampler( m_device,
  2868. reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
  2869. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2870. reinterpret_cast<VkSampler *>( &sampler ) );
  2871. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
  2872. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2873. UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2874. }
  2875. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2876. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2877. template <typename Dispatch>
  2878. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2879. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2880. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2881. {
  2882. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2883. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2884. }
  2885. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2886. template <typename Dispatch>
  2887. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2888. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2889. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2890. {
  2891. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2892. d.vkDestroySampler( m_device,
  2893. static_cast<VkSampler>( sampler ),
  2894. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2895. }
  2896. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2897. template <typename Dispatch>
  2898. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2899. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2900. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2901. {
  2902. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2903. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2904. }
  2905. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2906. template <typename Dispatch>
  2907. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2908. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2909. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2910. {
  2911. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2912. d.vkDestroySampler( m_device,
  2913. static_cast<VkSampler>( sampler ),
  2914. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2915. }
  2916. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2917. template <typename Dispatch>
  2918. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
  2919. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2920. VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
  2921. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2922. {
  2923. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2924. return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
  2925. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
  2926. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2927. reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
  2928. }
  2929. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2930. template <typename Dispatch>
  2931. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
  2932. Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
  2933. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2934. Dispatch const & d ) const
  2935. {
  2936. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2937. VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
  2938. VkResult result = d.vkCreateDescriptorSetLayout(
  2939. m_device,
  2940. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
  2941. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2942. reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
  2943. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
  2944. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
  2945. }
  2946. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2947. template <typename Dispatch>
  2948. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
  2949. Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
  2950. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2951. Dispatch const & d ) const
  2952. {
  2953. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2954. VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
  2955. VkResult result = d.vkCreateDescriptorSetLayout(
  2956. m_device,
  2957. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
  2958. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2959. reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
  2960. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
  2961. return createResultValueType(
  2962. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2963. UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2964. }
  2965. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  2966. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2967. template <typename Dispatch>
  2968. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2969. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2970. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2971. {
  2972. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2973. d.vkDestroyDescriptorSetLayout(
  2974. m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2975. }
  2976. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2977. template <typename Dispatch>
  2978. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2979. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2980. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2981. {
  2982. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2983. d.vkDestroyDescriptorSetLayout(
  2984. m_device,
  2985. static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
  2986. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2987. }
  2988. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  2989. template <typename Dispatch>
  2990. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2991. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2992. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2993. {
  2994. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2995. d.vkDestroyDescriptorSetLayout(
  2996. m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2997. }
  2998. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2999. template <typename Dispatch>
  3000. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  3001. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3002. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3003. {
  3004. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3005. d.vkDestroyDescriptorSetLayout(
  3006. m_device,
  3007. static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
  3008. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3009. }
  3010. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3011. template <typename Dispatch>
  3012. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
  3013. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3014. VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
  3015. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3016. {
  3017. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3018. return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
  3019. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
  3020. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3021. reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
  3022. }
  3023. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3024. template <typename Dispatch>
  3025. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
  3026. Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
  3027. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3028. Dispatch const & d ) const
  3029. {
  3030. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3031. VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
  3032. VkResult result =
  3033. d.vkCreateDescriptorPool( m_device,
  3034. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
  3035. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3036. reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
  3037. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
  3038. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
  3039. }
  3040. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3041. template <typename Dispatch>
  3042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
  3043. Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
  3044. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3045. Dispatch const & d ) const
  3046. {
  3047. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3048. VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
  3049. VkResult result =
  3050. d.vkCreateDescriptorPool( m_device,
  3051. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
  3052. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3053. reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
  3054. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
  3055. return createResultValueType(
  3056. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3057. UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3058. }
  3059. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3060. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3061. template <typename Dispatch>
  3062. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3063. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3064. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3065. {
  3066. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3067. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3068. }
  3069. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3070. template <typename Dispatch>
  3071. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3072. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3073. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3074. {
  3075. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3076. d.vkDestroyDescriptorPool( m_device,
  3077. static_cast<VkDescriptorPool>( descriptorPool ),
  3078. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3079. }
  3080. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3081. template <typename Dispatch>
  3082. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3083. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3084. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3085. {
  3086. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3087. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3088. }
  3089. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3090. template <typename Dispatch>
  3091. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3092. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3093. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3094. {
  3095. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3096. d.vkDestroyDescriptorPool( m_device,
  3097. static_cast<VkDescriptorPool>( descriptorPool ),
  3098. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3099. }
  3100. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3101. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3102. template <typename Dispatch>
  3103. VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3104. VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
  3105. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3106. {
  3107. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3108. return static_cast<Result>(
  3109. d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
  3110. }
  3111. #else
  3112. template <typename Dispatch>
  3113. VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3114. VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
  3115. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3116. {
  3117. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3118. d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
  3119. }
  3120. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3121. template <typename Dispatch>
  3122. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
  3123. VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3124. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3125. {
  3126. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3127. return static_cast<Result>( d.vkAllocateDescriptorSets(
  3128. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
  3129. }
  3130. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3131. template <typename DescriptorSetAllocator, typename Dispatch>
  3132. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
  3133. Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  3134. {
  3135. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3136. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
  3137. VkResult result = d.vkAllocateDescriptorSets(
  3138. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3139. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  3140. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
  3141. }
  3142. template <typename DescriptorSetAllocator,
  3143. typename Dispatch,
  3144. typename B0,
  3145. typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
  3146. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
  3147. Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
  3148. DescriptorSetAllocator & descriptorSetAllocator,
  3149. Dispatch const & d ) const
  3150. {
  3151. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3152. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
  3153. VkResult result = d.vkAllocateDescriptorSets(
  3154. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3155. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  3156. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
  3157. }
  3158. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3159. template <typename Dispatch, typename DescriptorSetAllocator>
  3160. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3161. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
  3162. Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  3163. {
  3164. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3165. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  3166. VkResult result = d.vkAllocateDescriptorSets(
  3167. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3168. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  3169. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
  3170. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  3171. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  3172. for ( auto const & descriptorSet : descriptorSets )
  3173. {
  3174. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
  3175. }
  3176. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
  3177. }
  3178. template <typename Dispatch,
  3179. typename DescriptorSetAllocator,
  3180. typename B0,
  3181. typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
  3182. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3183. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
  3184. Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
  3185. DescriptorSetAllocator & descriptorSetAllocator,
  3186. Dispatch const & d ) const
  3187. {
  3188. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3189. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  3190. VkResult result = d.vkAllocateDescriptorSets(
  3191. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3192. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  3193. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
  3194. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  3195. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  3196. for ( auto const & descriptorSet : descriptorSets )
  3197. {
  3198. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
  3199. }
  3200. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
  3201. }
  3202. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3203. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3204. template <typename Dispatch>
  3205. VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3206. uint32_t descriptorSetCount,
  3207. const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3208. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3209. {
  3210. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3211. return static_cast<Result>( d.vkFreeDescriptorSets(
  3212. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  3213. }
  3214. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3215. template <typename Dispatch>
  3216. VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3217. ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
  3218. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3219. {
  3220. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3221. d.vkFreeDescriptorSets(
  3222. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
  3223. }
  3224. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3225. template <typename Dispatch>
  3226. VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3227. uint32_t descriptorSetCount,
  3228. const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3229. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3230. {
  3231. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3232. return static_cast<Result>( d.vkFreeDescriptorSets(
  3233. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  3234. }
  3235. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3236. template <typename Dispatch>
  3237. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3238. ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
  3239. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3240. {
  3241. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3242. d.vkFreeDescriptorSets(
  3243. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
  3244. }
  3245. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3246. template <typename Dispatch>
  3247. VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
  3248. const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
  3249. uint32_t descriptorCopyCount,
  3250. const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
  3251. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3252. {
  3253. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3254. d.vkUpdateDescriptorSets( m_device,
  3255. descriptorWriteCount,
  3256. reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
  3257. descriptorCopyCount,
  3258. reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  3259. }
  3260. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3261. template <typename Dispatch>
  3262. VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
  3263. ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
  3264. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3265. {
  3266. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3267. d.vkUpdateDescriptorSets( m_device,
  3268. descriptorWrites.size(),
  3269. reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
  3270. descriptorCopies.size(),
  3271. reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  3272. }
  3273. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3274. template <typename Dispatch>
  3275. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
  3276. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3277. VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
  3278. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3279. {
  3280. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3281. return static_cast<Result>( d.vkCreateFramebuffer( m_device,
  3282. reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
  3283. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3284. reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
  3285. }
  3286. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3287. template <typename Dispatch>
  3288. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
  3289. Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
  3290. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3291. Dispatch const & d ) const
  3292. {
  3293. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3294. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
  3295. VkResult result =
  3296. d.vkCreateFramebuffer( m_device,
  3297. reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
  3298. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3299. reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
  3300. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
  3301. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
  3302. }
  3303. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3304. template <typename Dispatch>
  3305. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
  3306. Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
  3307. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3308. Dispatch const & d ) const
  3309. {
  3310. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3311. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
  3312. VkResult result =
  3313. d.vkCreateFramebuffer( m_device,
  3314. reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
  3315. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3316. reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
  3317. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
  3318. return createResultValueType(
  3319. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3320. UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3321. }
  3322. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3323. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3324. template <typename Dispatch>
  3325. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3326. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3327. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3328. {
  3329. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3330. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3331. }
  3332. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3333. template <typename Dispatch>
  3334. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3335. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3336. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3337. {
  3338. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3339. d.vkDestroyFramebuffer( m_device,
  3340. static_cast<VkFramebuffer>( framebuffer ),
  3341. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3342. }
  3343. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3344. template <typename Dispatch>
  3345. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3346. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3347. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3348. {
  3349. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3350. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3351. }
  3352. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3353. template <typename Dispatch>
  3354. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3355. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3356. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3357. {
  3358. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3359. d.vkDestroyFramebuffer( m_device,
  3360. static_cast<VkFramebuffer>( framebuffer ),
  3361. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3362. }
  3363. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3364. template <typename Dispatch>
  3365. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
  3366. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3367. VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
  3368. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3369. {
  3370. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3371. return static_cast<Result>( d.vkCreateRenderPass( m_device,
  3372. reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
  3373. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3374. reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  3375. }
  3376. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3377. template <typename Dispatch>
  3378. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
  3379. Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
  3380. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3381. Dispatch const & d ) const
  3382. {
  3383. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3384. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  3385. VkResult result =
  3386. d.vkCreateRenderPass( m_device,
  3387. reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
  3388. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3389. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  3390. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
  3391. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  3392. }
  3393. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3394. template <typename Dispatch>
  3395. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
  3396. Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
  3397. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3398. Dispatch const & d ) const
  3399. {
  3400. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3401. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  3402. VkResult result =
  3403. d.vkCreateRenderPass( m_device,
  3404. reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
  3405. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3406. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  3407. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
  3408. return createResultValueType(
  3409. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3410. UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3411. }
  3412. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3413. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3414. template <typename Dispatch>
  3415. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3416. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3417. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3418. {
  3419. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3420. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3421. }
  3422. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3423. template <typename Dispatch>
  3424. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3425. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3426. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3427. {
  3428. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3429. d.vkDestroyRenderPass( m_device,
  3430. static_cast<VkRenderPass>( renderPass ),
  3431. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3432. }
  3433. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3434. template <typename Dispatch>
  3435. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3436. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3437. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3438. {
  3439. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3440. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3441. }
  3442. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3443. template <typename Dispatch>
  3444. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3445. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3446. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3447. {
  3448. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3449. d.vkDestroyRenderPass( m_device,
  3450. static_cast<VkRenderPass>( renderPass ),
  3451. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3452. }
  3453. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3454. template <typename Dispatch>
  3455. VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3456. VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
  3457. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3458. {
  3459. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3460. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  3461. }
  3462. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3463. template <typename Dispatch>
  3464. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3465. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3466. {
  3467. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3468. VULKAN_HPP_NAMESPACE::Extent2D granularity;
  3469. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
  3470. return granularity;
  3471. }
  3472. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3473. template <typename Dispatch>
  3474. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
  3475. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3476. VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
  3477. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3478. {
  3479. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3480. return static_cast<Result>( d.vkCreateCommandPool( m_device,
  3481. reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
  3482. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3483. reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
  3484. }
  3485. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3486. template <typename Dispatch>
  3487. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
  3488. Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
  3489. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3490. Dispatch const & d ) const
  3491. {
  3492. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3493. VULKAN_HPP_NAMESPACE::CommandPool commandPool;
  3494. VkResult result =
  3495. d.vkCreateCommandPool( m_device,
  3496. reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
  3497. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3498. reinterpret_cast<VkCommandPool *>( &commandPool ) );
  3499. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
  3500. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
  3501. }
  3502. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3503. template <typename Dispatch>
  3504. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
  3505. Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
  3506. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3507. Dispatch const & d ) const
  3508. {
  3509. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3510. VULKAN_HPP_NAMESPACE::CommandPool commandPool;
  3511. VkResult result =
  3512. d.vkCreateCommandPool( m_device,
  3513. reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
  3514. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3515. reinterpret_cast<VkCommandPool *>( &commandPool ) );
  3516. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
  3517. return createResultValueType(
  3518. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3519. UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3520. }
  3521. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3522. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3523. template <typename Dispatch>
  3524. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3525. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3526. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3527. {
  3528. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3529. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3530. }
  3531. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3532. template <typename Dispatch>
  3533. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3534. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3535. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3536. {
  3537. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3538. d.vkDestroyCommandPool( m_device,
  3539. static_cast<VkCommandPool>( commandPool ),
  3540. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3541. }
  3542. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3543. template <typename Dispatch>
  3544. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3545. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3546. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3547. {
  3548. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3549. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3550. }
  3551. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3552. template <typename Dispatch>
  3553. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3554. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3555. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3556. {
  3557. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3558. d.vkDestroyCommandPool( m_device,
  3559. static_cast<VkCommandPool>( commandPool ),
  3560. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3561. }
  3562. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3563. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3564. template <typename Dispatch>
  3565. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3566. VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
  3567. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3568. {
  3569. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3570. return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  3571. }
  3572. #else
  3573. template <typename Dispatch>
  3574. VULKAN_HPP_INLINE typename ResultValueType<void>::type
  3575. Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
  3576. {
  3577. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3578. VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
  3579. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
  3580. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  3581. }
  3582. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3583. template <typename Dispatch>
  3584. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
  3585. VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3586. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3587. {
  3588. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3589. return static_cast<Result>( d.vkAllocateCommandBuffers(
  3590. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
  3591. }
  3592. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3593. template <typename CommandBufferAllocator, typename Dispatch>
  3594. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
  3595. Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  3596. {
  3597. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3598. std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
  3599. VkResult result = d.vkAllocateCommandBuffers(
  3600. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
  3601. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  3602. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
  3603. }
  3604. template <typename CommandBufferAllocator,
  3605. typename Dispatch,
  3606. typename B0,
  3607. typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
  3608. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
  3609. Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
  3610. CommandBufferAllocator & commandBufferAllocator,
  3611. Dispatch const & d ) const
  3612. {
  3613. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3614. std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
  3615. VkResult result = d.vkAllocateCommandBuffers(
  3616. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
  3617. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  3618. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
  3619. }
  3620. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  3621. template <typename Dispatch, typename CommandBufferAllocator>
  3622. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3623. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
  3624. Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  3625. {
  3626. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3627. std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
  3628. VkResult result = d.vkAllocateCommandBuffers(
  3629. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
  3630. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  3631. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
  3632. uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
  3633. PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
  3634. for ( auto const & commandBuffer : commandBuffers )
  3635. {
  3636. uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
  3637. }
  3638. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
  3639. }
  3640. template <typename Dispatch,
  3641. typename CommandBufferAllocator,
  3642. typename B0,
  3643. typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
  3644. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3645. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
  3646. Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
  3647. CommandBufferAllocator & commandBufferAllocator,
  3648. Dispatch const & d ) const
  3649. {
  3650. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3651. std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
  3652. VkResult result = d.vkAllocateCommandBuffers(
  3653. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
  3654. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  3655. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
  3656. uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
  3657. PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
  3658. for ( auto const & commandBuffer : commandBuffers )
  3659. {
  3660. uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
  3661. }
  3662. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
  3663. }
  3664. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  3665. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3666. template <typename Dispatch>
  3667. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3668. uint32_t commandBufferCount,
  3669. const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3670. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3671. {
  3672. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3673. d.vkFreeCommandBuffers(
  3674. m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  3675. }
  3676. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3677. template <typename Dispatch>
  3678. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3679. ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
  3680. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3681. {
  3682. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3683. d.vkFreeCommandBuffers(
  3684. m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  3685. }
  3686. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3687. template <typename Dispatch>
  3688. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3689. uint32_t commandBufferCount,
  3690. const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3691. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3692. {
  3693. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3694. d.vkFreeCommandBuffers(
  3695. m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  3696. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Alias of freeCommandBuffers (enhanced ArrayProxy form). The parenthesized
  // name keeps a possible function-like `free` macro from expanding here.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool                             commandPool,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                          Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkFreeCommandBuffers(
      m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Begin recording into this command buffer (vkBeginCommandBuffer).
  // C-style overload: begin info is taken by pointer and the raw Result is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
                                                                      Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: begin info by reference; the VkResult is funneled through
  // resultCheck / createResultValueType instead of being returned raw.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Finish recording this command buffer (vkEndCommandBuffer); raw Result returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
#else
  // Enhanced variant: the VkResult is funneled through resultCheck /
  // createResultValueType instead of being returned raw.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reset this command buffer (vkResetCommandBuffer); raw Result returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
                                                                      Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
#else
  // Enhanced variant: the VkResult is funneled through resultCheck /
  // createResultValueType instead of being returned raw.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3761. template <typename Dispatch>
  3762. VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  3763. VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  3764. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3765. {
  3766. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3767. d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  3768. }
  // Record vkCmdSetViewport: set viewportCount viewports starting at index firstViewport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                               firstViewport,
                                                     uint32_t                               viewportCount,
                                                     const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                     Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t                                                firstViewport,
                                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                     Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdSetScissor: set scissorCount scissor rectangles starting at firstScissor.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                             firstScissor,
                                                    uint32_t                             scissorCount,
                                                    const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
                                                    Dispatch const &                     d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t                                              firstScissor,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                    Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdSetLineWidth: set the dynamic line-width state.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }
  // Record vkCmdSetDepthBias: set the dynamic depth-bias state (constant factor, clamp, slope factor).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }
  // Record vkCmdSetBlendConstants: set the four dynamic blend-constant components (RGBA).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }
  // Record vkCmdSetDepthBounds: set the dynamic depth-bounds test range [min, max].
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }
  // Record vkCmdSetStencilCompareMask: set the stencil compare mask for the faces in faceMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }
  // Record vkCmdSetStencilWriteMask: set the stencil write mask for the faces in faceMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }
  // Record vkCmdSetStencilReference: set the stencil reference value for the faces in faceMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }
  // Record vkCmdBindDescriptorSets: bind descriptorSetCount sets to the given layout,
  // starting at set index firstSet, with dynamic offsets applied in order.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint     pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout        layout,
                                                            uint32_t                                    firstSet,
                                                            uint32_t                                    descriptorSetCount,
                                                            const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
                                                            uint32_t                                    dynamicOffsetCount,
                                                            const uint32_t *                            pDynamicOffsets,
                                                            Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSetCount,
                               reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
                               dynamicOffsetCount,
                               pDynamicOffsets );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: counts and pointers are derived from the two ArrayProxy arguments.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint                       pipelineBindPoint,
                                                            VULKAN_HPP_NAMESPACE::PipelineLayout                          layout,
                                                            uint32_t                                                      firstSet,
                                                            ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
                                                            ArrayProxy<const uint32_t> const &                            dynamicOffsets,
                                                            Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindDescriptorSets( m_commandBuffer,
                               static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                               static_cast<VkPipelineLayout>( layout ),
                               firstSet,
                               descriptorSets.size(),
                               reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
                               dynamicOffsets.size(),
                               dynamicOffsets.data() );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdBindIndexBuffer: bind buffer at byte offset as the index buffer, with
  // the given index element type.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                         VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                         VULKAN_HPP_NAMESPACE::IndexType  indexType,
                                                         Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
  }
  // Record vkCmdBindVertexBuffers: bind bindingCount vertex buffers (with matching
  // byte offsets) starting at binding index firstBinding.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                 firstBinding,
                                                           uint32_t                                 bindingCount,
                                                           const VULKAN_HPP_NAMESPACE::Buffer *     pBuffers,
                                                           const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                           Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers(
      m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: the two proxies must be the same length, since each buffer is
  // paired with one offset. Mismatch is an assert (no-exceptions builds) or a LogicError.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t                                                   firstBinding,
                                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const &     buffers,
                                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                           Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdDraw: non-indexed draw of vertexCount vertices / instanceCount instances.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw(
    uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  }
  // Record vkCmdDrawIndexed: indexed draw; vertexOffset is added to each index value.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t         indexCount,
                                                     uint32_t         instanceCount,
                                                     uint32_t         firstIndex,
                                                     int32_t          vertexOffset,
                                                     uint32_t         firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }
  // Record vkCmdDrawIndirect: drawCount draws whose parameters are read from buffer at
  // byte offset, stride bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                      uint32_t                         drawCount,
                                                      uint32_t                         stride,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }
  // Record vkCmdDrawIndexedIndirect: indexed counterpart of drawIndirect; draw
  // parameters are read from buffer at byte offset, stride bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                             VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                             uint32_t                         drawCount,
                                                             uint32_t                         stride,
                                                             Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }
  // Record vkCmdDispatch: launch a compute grid of groupCountX x groupCountY x groupCountZ workgroups.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }
  // Record vkCmdDispatchIndirect: compute dispatch whose group counts are read from
  // buffer at byte offset.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                          VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                          Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  }
  // Record vkCmdCopyBuffer: copy regionCount regions from srcBuffer to dstBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer             srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer             dstBuffer,
                                                    uint32_t                                 regionCount,
                                                    const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
                                                    Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regionCount,
                       reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer                               srcBuffer,
                                                    VULKAN_HPP_NAMESPACE::Buffer                               dstBuffer,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
                                                    Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer( m_commandBuffer,
                       static_cast<VkBuffer>( srcBuffer ),
                       static_cast<VkBuffer>( dstBuffer ),
                       regions.size(),
                       reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdCopyImage: copy regionCount regions between images, with each image's
  // current layout supplied by the caller.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageCopy *>( pRegions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image                               srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                         srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image                               dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                         dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdBlitImage: scaled/format-converting copy of regionCount regions between
  // images, sampled with the given filter.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image             srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image             dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout       dstImageLayout,
                                                   uint32_t                                regionCount,
                                                   const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
                                                   VULKAN_HPP_NAMESPACE::Filter            filter,
                                                   Dispatch const &                        d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regionCount,
                      reinterpret_cast<const VkImageBlit *>( pRegions ),
                      static_cast<VkFilter>( filter ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image                               srcImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                         srcImageLayout,
                                                   VULKAN_HPP_NAMESPACE::Image                               dstImage,
                                                   VULKAN_HPP_NAMESPACE::ImageLayout                         dstImageLayout,
                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
                                                   VULKAN_HPP_NAMESPACE::Filter                              filter,
                                                   Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage( m_commandBuffer,
                      static_cast<VkImage>( srcImage ),
                      static_cast<VkImageLayout>( srcImageLayout ),
                      static_cast<VkImage>( dstImage ),
                      static_cast<VkImageLayout>( dstImageLayout ),
                      regions.size(),
                      reinterpret_cast<const VkImageBlit *>( regions.data() ),
                      static_cast<VkFilter>( filter ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdCopyBufferToImage: copy regionCount regions from srcBuffer into dstImage
  // (in dstImageLayout).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                  srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image                   dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             dstImageLayout,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer                                    srcBuffer,
                                                           VULKAN_HPP_NAMESPACE::Image                                     dstImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout                               dstImageLayout,
                                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage( m_commandBuffer,
                              static_cast<VkBuffer>( srcBuffer ),
                              static_cast<VkImage>( dstImage ),
                              static_cast<VkImageLayout>( dstImageLayout ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdCopyImageToBuffer: copy regionCount regions from srcImage (in
  // srcImageLayout) into dstBuffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                   srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout             srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer                  dstBuffer,
                                                           uint32_t                                      regionCount,
                                                           const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regionCount,
                              reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are both derived from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image                                     srcImage,
                                                           VULKAN_HPP_NAMESPACE::ImageLayout                               srcImageLayout,
                                                           VULKAN_HPP_NAMESPACE::Buffer                                    dstBuffer,
                                                           ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
                                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer( m_commandBuffer,
                              static_cast<VkImage>( srcImage ),
                              static_cast<VkImageLayout>( srcImageLayout ),
                              static_cast<VkBuffer>( dstBuffer ),
                              regions.size(),
                              reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdUpdateBuffer: write dataSize bytes from pData into dstBuffer at
  // byte offset dstOffset.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
                                                      const void *                     pData,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdUpdateBuffer(
      m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced, typed overload: the byte size is computed as element count times
  // sizeof(DataType), so the proxy's element type determines how much is written.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                      VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                      ArrayProxy<const DataType> const & data,
                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdUpdateBuffer( m_commandBuffer,
                         static_cast<VkBuffer>( dstBuffer ),
                         static_cast<VkDeviceSize>( dstOffset ),
                         data.size() * sizeof( DataType ),
                         reinterpret_cast<const void *>( data.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdFillBuffer: fill size bytes of dstBuffer starting at dstOffset with the
  // repeated 32-bit value `data`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer     dstBuffer,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                    VULKAN_HPP_NAMESPACE::DeviceSize size,
                                                    uint32_t                         data,
                                                    Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
  }
  // Record vkCmdClearColorImage: clear rangeCount subresource ranges of a color image
  // to the given clear color.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image                         image,
                                                         VULKAN_HPP_NAMESPACE::ImageLayout                   imageLayout,
                                                         const VULKAN_HPP_NAMESPACE::ClearColorValue *       pColor,
                                                         uint32_t                                            rangeCount,
                                                         const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
                                                         Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( pColor ),
                            rangeCount,
                            reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: clear color by reference; range count and pointer are derived
  // from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image                                           image,
                                                         VULKAN_HPP_NAMESPACE::ImageLayout                                     imageLayout,
                                                         const VULKAN_HPP_NAMESPACE::ClearColorValue &                         color,
                                                         ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                                         Dispatch const &                                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearColorImage( m_commandBuffer,
                            static_cast<VkImage>( image ),
                            static_cast<VkImageLayout>( imageLayout ),
                            reinterpret_cast<const VkClearColorValue *>( &color ),
                            ranges.size(),
                            reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdClearDepthStencilImage: clear rangeCount subresource ranges of a
  // depth/stencil image to the given depth/stencil value.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                          image,
                                                                VULKAN_HPP_NAMESPACE::ImageLayout                    imageLayout,
                                                                const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
                                                                uint32_t                                             rangeCount,
                                                                const VULKAN_HPP_NAMESPACE::ImageSubresourceRange *  pRanges,
                                                                Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
                                   rangeCount,
                                   reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: clear value by reference; range count and pointer are derived
  // from the single ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image                                           image,
                                                                VULKAN_HPP_NAMESPACE::ImageLayout                                     imageLayout,
                                                                const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue &                  depthStencil,
                                                                ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
                                                                Dispatch const &                                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearDepthStencilImage( m_commandBuffer,
                                   static_cast<VkImage>( image ),
                                   static_cast<VkImageLayout>( imageLayout ),
                                   reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
                                   ranges.size(),
                                   reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Record vkCmdClearAttachments: clear attachmentCount attachments of the current
  // render pass within rectCount rectangles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t                                      attachmentCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
                                                          uint32_t                                      rectCount,
                                                          const VULKAN_HPP_NAMESPACE::ClearRect *       pRects,
                                                          Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built from a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachmentCount,
                             reinterpret_cast<const VkClearAttachment *>( pAttachments ),
                             rectCount,
                             reinterpret_cast<const VkClearRect *>( pRects ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: counts and pointers are derived from the two ArrayProxy arguments.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
                                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const &       rects,
                                                          Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdClearAttachments( m_commandBuffer,
                             attachments.size(),
                             reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
                             rects.size(),
                             reinterpret_cast<const VkClearRect *>( rects.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkCmdResolveImage: resolves regions of a multisample image
// (srcImage) into a non-multisample image (dstImage). Handles and layout enums are
// static_cast to their C equivalents; the region array pointer is reinterpret_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                    VULKAN_HPP_NAMESPACE::Image dstImage,
                                                    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                    uint32_t regionCount,
                                                    const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdResolveImage( m_commandBuffer,
                       static_cast<VkImage>( srcImage ),
                       static_cast<VkImageLayout>( srcImageLayout ),
                       static_cast<VkImage>( dstImage ),
                       static_cast<VkImageLayout>( dstImageLayout ),
                       regionCount,
                       reinterpret_cast<const VkImageResolve *>( pRegions ) );
}
  4310. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of resolveImage: accepts the resolve regions as an ArrayProxy
// and derives the region count from the proxy's size.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
                                                    VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
                                                    VULKAN_HPP_NAMESPACE::Image dstImage,
                                                    VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
                                                    ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdResolveImage( m_commandBuffer,
                       static_cast<VkImage>( srcImage ),
                       static_cast<VkImageLayout>( srcImageLayout ),
                       static_cast<VkImage>( dstImage ),
                       static_cast<VkImageLayout>( dstImageLayout ),
                       regions.size(),
                       reinterpret_cast<const VkImageResolve *>( regions.data() ) );
}
  4328. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Wrapper for vkCmdSetEvent: records a command that signals the event after the
// given pipeline stages complete.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
// Wrapper for vkCmdResetEvent: records a command that unsignals the event after the
// given pipeline stages complete. Mirror of setEvent above.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
}
// C-API-shaped wrapper for vkCmdWaitEvents: waits on the given events and applies the
// three barrier arrays (global memory, buffer, image) between srcStageMask and
// dstStageMask. Each count/pointer pair is forwarded unchanged; struct pointers are
// reinterpret_cast to the layout-compatible C types.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount,
                                                  const VULKAN_HPP_NAMESPACE::Event * pEvents,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                                                  uint32_t memoryBarrierCount,
                                                  const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
                                                  uint32_t bufferMemoryBarrierCount,
                                                  const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                                  uint32_t imageMemoryBarrierCount,
                                                  const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdWaitEvents( m_commandBuffer,
                     eventCount,
                     reinterpret_cast<const VkEvent *>( pEvents ),
                     static_cast<VkPipelineStageFlags>( srcStageMask ),
                     static_cast<VkPipelineStageFlags>( dstStageMask ),
                     memoryBarrierCount,
                     reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                     bufferMemoryBarrierCount,
                     reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                     imageMemoryBarrierCount,
                     reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
}
  4371. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of waitEvents: each event/barrier array is passed as an
// ArrayProxy; the corresponding counts come from the proxies' sizes.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                                                  VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
                                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdWaitEvents( m_commandBuffer,
                     events.size(),
                     reinterpret_cast<const VkEvent *>( events.data() ),
                     static_cast<VkPipelineStageFlags>( srcStageMask ),
                     static_cast<VkPipelineStageFlags>( dstStageMask ),
                     memoryBarriers.size(),
                     reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                     bufferMemoryBarriers.size(),
                     reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                     imageMemoryBarriers.size(),
                     reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
  4394. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkCmdPipelineBarrier: inserts an execution/memory dependency
// between srcStageMask and dstStageMask, applying the three barrier arrays (global
// memory, buffer, image). dependencyFlags controls e.g. by-region dependencies.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                                                       VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                                                       VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
                                                       uint32_t memoryBarrierCount,
                                                       const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
                                                       uint32_t bufferMemoryBarrierCount,
                                                       const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
                                                       uint32_t imageMemoryBarrierCount,
                                                       const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPipelineBarrier( m_commandBuffer,
                          static_cast<VkPipelineStageFlags>( srcStageMask ),
                          static_cast<VkPipelineStageFlags>( dstStageMask ),
                          static_cast<VkDependencyFlags>( dependencyFlags ),
                          memoryBarrierCount,
                          reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
                          bufferMemoryBarrierCount,
                          reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
                          imageMemoryBarrierCount,
                          reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
}
  4419. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of pipelineBarrier: barrier arrays are ArrayProxy ranges;
// the counts are derived from the proxies' sizes.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
                                                       VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
                                                       VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
                                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
                                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
                                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPipelineBarrier( m_commandBuffer,
                          static_cast<VkPipelineStageFlags>( srcStageMask ),
                          static_cast<VkPipelineStageFlags>( dstStageMask ),
                          static_cast<VkDependencyFlags>( dependencyFlags ),
                          memoryBarriers.size(),
                          reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
                          bufferMemoryBarriers.size(),
                          reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
                          imageMemoryBarriers.size(),
                          reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
}
  4441. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Wrapper for vkCmdBeginQuery: begins query slot `query` of the given pool, with
// optional control flags (e.g. precise occlusion counting).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                  uint32_t query,
                                                  VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
}
// Wrapper for vkCmdEndQuery: ends query slot `query` of the given pool.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
}
// Wrapper for vkCmdResetQueryPool: resets queryCount queries starting at firstQuery.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                      uint32_t firstQuery,
                                                      uint32_t queryCount,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
}
// Wrapper for vkCmdWriteTimestamp: writes a timestamp into query slot `query` when
// the given pipeline stage completes.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                      VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                      uint32_t query,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
}
// Wrapper for vkCmdCopyQueryPoolResults: copies the results of queries
// [firstQuery, firstQuery + queryCount) into dstBuffer at dstOffset, with `stride`
// bytes between consecutive results; `flags` selects result width/availability etc.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                            uint32_t firstQuery,
                                                            uint32_t queryCount,
                                                            VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                            VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                            VULKAN_HPP_NAMESPACE::DeviceSize stride,
                                                            VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdCopyQueryPoolResults( m_commandBuffer,
                               static_cast<VkQueryPool>( queryPool ),
                               firstQuery,
                               queryCount,
                               static_cast<VkBuffer>( dstBuffer ),
                               static_cast<VkDeviceSize>( dstOffset ),
                               static_cast<VkDeviceSize>( stride ),
                               static_cast<VkQueryResultFlags>( flags ) );
}
// C-API-shaped wrapper for vkCmdPushConstants: updates `size` bytes of push-constant
// storage at byte `offset` for the stages in stageFlags, reading from pValues.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
                                                     VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                     uint32_t offset,
                                                     uint32_t size,
                                                     const void * pValues,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
}
  4506. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of pushConstants: takes a typed ArrayProxy of values and
// computes the byte size to upload as element count * sizeof( ValuesType ).
template <typename ValuesType, typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
                                                     VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
                                                     uint32_t offset,
                                                     ArrayProxy<const ValuesType> const & values,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPushConstants( m_commandBuffer,
                        static_cast<VkPipelineLayout>( layout ),
                        static_cast<VkShaderStageFlags>( stageFlags ),
                        offset,
                        values.size() * sizeof( ValuesType ),  // size in bytes, as the C API expects
                        reinterpret_cast<const void *>( values.data() ) );
}
  4522. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkCmdBeginRenderPass: begins the render pass described by
// *pRenderPassBegin; `contents` selects inline vs. secondary-command-buffer recording.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                                       VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
  4531. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of beginRenderPass: takes the begin-info by reference and
// forwards its address to the C entry point.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
                                                       VULKAN_HPP_NAMESPACE::SubpassContents contents,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
}
  4540. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Wrapper for vkCmdNextSubpass: advances to the next subpass of the current render pass.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
}
// Wrapper for vkCmdEndRenderPass: ends the current render pass instance.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndRenderPass( m_commandBuffer );
}
// C-API-shaped wrapper for vkCmdExecuteCommands: executes the given secondary command
// buffers from this (primary) command buffer.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
                                                       const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
}
  4561. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of executeCommands: the secondary command buffers come in as
// an ArrayProxy; the count is derived from its size.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
}
  4569. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4570. //=== VK_VERSION_1_1 ===
// C-API-shaped wrapper for vkEnumerateInstanceVersion: stores the supported instance
// API version into *pApiVersion and returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
}
  4577. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of enumerateInstanceVersion: returns the version by value.
// resultCheck turns a failure Result into an exception (or the configured error
// handling); on success the version is packed into the ResultValueType return.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  uint32_t apiVersion;
  VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
}
  4587. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkBindBufferMemory2 (Vulkan 1.1): binds memory to one or
// more buffers as described by the BindBufferMemoryInfo array; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount,
                                                                         const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
}
  4596. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of bindBufferMemory2: takes the bind infos as an ArrayProxy
// and converts a failure Result into the library's error handling via resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  4606. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkBindImageMemory2 (Vulkan 1.1): binds memory to one or
// more images as described by the BindImageMemoryInfo array; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount,
                                                                        const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
}
  4615. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of bindImageMemory2: ArrayProxy of bind infos; failure
// Results are routed through resultCheck. Mirrors bindBufferMemory2 above.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  4625. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkGetDeviceGroupPeerMemoryFeatures: queries, for the given
// heap, which peer-memory features the local device has for memory on the remote
// device; the result is written through pPeerMemoryFeatures.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex,
                                                           uint32_t localDeviceIndex,
                                                           uint32_t remoteDeviceIndex,
                                                           VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetDeviceGroupPeerMemoryFeatures(
    m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
}
  4637. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of getGroupPeerMemoryFeatures: returns the feature flags
// by value instead of writing through an out pointer.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures(
  uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  d.vkGetDeviceGroupPeerMemoryFeatures(
    m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  return peerMemoryFeatures;
}
  4648. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Wrapper for vkCmdSetDeviceMask (Vulkan 1.1, device groups): sets the mask of
// physical devices that execute subsequent commands in this command buffer.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
}
// Wrapper for vkCmdDispatchBase (Vulkan 1.1): dispatches a compute grid of
// groupCountX/Y/Z workgroups whose workgroup IDs start at baseGroupX/Y/Z
// instead of (0,0,0).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX,
                                                    uint32_t baseGroupY,
                                                    uint32_t baseGroupZ,
                                                    uint32_t groupCountX,
                                                    uint32_t groupCountY,
                                                    uint32_t groupCountZ,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
// C-API-shaped wrapper for vkEnumeratePhysicalDeviceGroups: standard Vulkan two-call
// query — pass a null properties pointer to receive the count, or a sized array to
// fill it. Returns the raw Result (may be VK_INCOMPLETE on a short buffer).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount,
                                           VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
    m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
}
  4677. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the device-group properties in a std::vector,
// hiding the Vulkan two-call idiom. Queries the count, resizes, fetches, and retries
// while the driver reports VK_INCOMPLETE (the count can change between calls).
// Finally shrinks the vector if the last call returned fewer elements than reserved.
template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
  Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
  uint32_t physicalDeviceGroupCount;
  VkResult result;
  do
  {
    // First call: query the current number of device groups.
    result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
    {
      // Second call: fetch the properties into a buffer of exactly that size.
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
      result = d.vkEnumeratePhysicalDeviceGroups(
        m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the set of groups grew in between
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
  {
    // The driver returned fewer entries than we allocated; trim the excess.
    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
// Allocator-aware variant of the enhanced enumeratePhysicalDeviceGroups overload:
// identical two-call/VK_INCOMPLETE retry logic, but the result vector is constructed
// with the caller-supplied allocator. B1's value_type is constrained (via the
// declaration's enable_if) to PhysicalDeviceGroupProperties.
template <typename PhysicalDeviceGroupPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
  Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
    physicalDeviceGroupPropertiesAllocator );
  uint32_t physicalDeviceGroupCount;
  VkResult result;
  do
  {
    // Two-call idiom: count first, then fetch; repeat while VK_INCOMPLETE.
    result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
      result = d.vkEnumeratePhysicalDeviceGroups(
        m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
  {
    // Trim if the final call delivered fewer entries than were reserved.
    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
  4736. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkGetImageMemoryRequirements2 (Vulkan 1.1): writes the
// memory requirements for the image described by *pInfo into *pMemoryRequirements.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                                            VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetImageMemoryRequirements2(
    m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
  4746. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of getImageMemoryRequirements2: returns a plain
// MemoryRequirements2 by value (no pNext chain beyond the base structure).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  d.vkGetImageMemoryRequirements2(
    m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return memoryRequirements;
}
// StructureChain overload of getImageMemoryRequirements2: the query fills the
// MemoryRequirements2 element of a caller-specified StructureChain, so extension
// structures linked via pNext (e.g. dedicated-allocation info) are populated too.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
  Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  StructureChain<X, Y, Z...> structureChain;
  // Write directly into the chain's MemoryRequirements2 link so its pNext stays intact.
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  d.vkGetImageMemoryRequirements2(
    m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return structureChain;
}
  4768. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkGetBufferMemoryRequirements2 (Vulkan 1.1): writes the
// memory requirements for the buffer described by *pInfo into *pMemoryRequirements.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                                             VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetBufferMemoryRequirements2(
    m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
  4778. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of getBufferMemoryRequirements2: returns a plain
// MemoryRequirements2 by value. Mirrors the image variant above.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  d.vkGetBufferMemoryRequirements2(
    m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return memoryRequirements;
}
// StructureChain overload of getBufferMemoryRequirements2: fills the chain's
// MemoryRequirements2 link so pNext-chained extension structures are populated too.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
  Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  d.vkGetBufferMemoryRequirements2(
    m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return structureChain;
}
  4800. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-API-shaped wrapper for vkGetImageSparseMemoryRequirements2 (Vulkan 1.1): two-call
// query — with a null pSparseMemoryRequirements it writes the requirement count,
// otherwise it fills the provided array.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
                                                                  uint32_t * pSparseMemoryRequirementCount,
                                                                  VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetImageSparseMemoryRequirements2( m_device,
                                         reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
                                         pSparseMemoryRequirementCount,
                                         reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
  4813. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the sparse memory requirements as a std::vector.
// No retry loop is needed here — the C function returns void, so a single
// count-then-fetch sequence suffices; the vector is trimmed if the second call
// reported fewer entries than the first.
template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  uint32_t sparseMemoryRequirementCount;
  // First call: obtain the number of requirement entries.
  d.vkGetImageSparseMemoryRequirements2(
    m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  // Second call: fill the sized buffer.
  d.vkGetImageSparseMemoryRequirements2( m_device,
                                         reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                         &sparseMemoryRequirementCount,
                                         reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  {
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  }
  return sparseMemoryRequirements;
}
// Allocator-aware variant of the enhanced getImageSparseMemoryRequirements2 overload:
// same count-then-fetch sequence, but the result vector is constructed with the
// caller-supplied allocator. B1's value_type is constrained (via the declaration's
// enable_if) to SparseImageMemoryRequirements2.
template <typename SparseImageMemoryRequirements2Allocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
                                             SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
                                             Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
    sparseImageMemoryRequirements2Allocator );
  uint32_t sparseMemoryRequirementCount;
  // Count query, then fetch into the sized buffer.
  d.vkGetImageSparseMemoryRequirements2(
    m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  d.vkGetImageSparseMemoryRequirements2( m_device,
                                         reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                         &sparseMemoryRequirementCount,
                                         reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  {
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  }
  return sparseMemoryRequirements;
}
  4862. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4863. template <typename Dispatch>
  4864. VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4865. {
  4866. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4867. d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  4868. }
  4869. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4870. template <typename Dispatch>
  4871. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
  4872. PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4873. {
  4874. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4875. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
  4876. d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
  4877. return features;
  4878. }
  // StructureChain variant: fills a caller-specified chain of feature structures. The chain's
  // PhysicalDeviceFeatures2 head (with its pre-wired pNext chain) is passed to the C entry point,
  // so every extension structure in the chain gets populated in one call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
  4888. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4889. template <typename Dispatch>
  4890. VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
  4891. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4892. {
  4893. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4894. d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  4895. }
  4896. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4897. template <typename Dispatch>
  4898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
  4899. PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4900. {
  4901. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4902. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
  4903. d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  4904. return properties;
  4905. }
  // StructureChain variant: populates a caller-specified chain headed by PhysicalDeviceProperties2,
  // so extension property structures linked via pNext are filled in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
  4915. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4916. template <typename Dispatch>
  4917. VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
  4918. VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
  4919. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4920. {
  4921. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4922. d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  4923. }
  4924. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4925. template <typename Dispatch>
  4926. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
  4927. PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4928. {
  4929. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4930. VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
  4931. d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
  4932. return formatProperties;
  4933. }
  // StructureChain variant: populates a chain headed by FormatProperties2 for the given format,
  // filling any pNext-linked extension structures in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format,
                                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
  4944. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4945. template <typename Dispatch>
  4946. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  4947. PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
  4948. VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
  4949. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4950. {
  4951. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4952. return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
  4953. reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
  4954. reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  4955. }
  4956. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: returns the image-format properties by value; resultCheck converts a
  // non-success VkResult into this namespace's error-reporting mechanism before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
                                                                   reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                   reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }
  // StructureChain variant: fills a chain headed by ImageFormatProperties2 (pNext-linked extension
  // structures included); resultCheck handles a non-success VkResult before the chain is returned.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
                                                                   reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                   reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
  4982. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  4983. template <typename Dispatch>
  4984. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount,
  4985. VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
  4986. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4987. {
  4988. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4989. d.vkGetPhysicalDeviceQueueFamilyProperties2(
  4990. m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  4991. }
  4992. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: returns all queue-family properties as a std::vector, using the
  // two-call Vulkan enumeration pattern (query count, then fetch data).
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }
  // Allocator-aware overload: same two-call enumeration as above, but the result vector is
  // constructed from a caller-supplied allocator (B1/enable_if constrains its value_type).
  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
    uint32_t queueFamilyPropertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }
  // StructureChain-per-element variant: returns one StructureChain per queue family. A temporary
  // vector of plain QueueFamilyProperties2 is used for the C call; each element's pNext is first
  // pointed at the corresponding chain's extension structures so they are filled too, and the head
  // structures are copied back into the chains afterwards.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<StructureChain, StructureChainAllocator> structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    // Wire each temporary head's pNext into its chain so the C call populates the whole chain.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    // Copy the filled head structures back into their chains.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
  // Allocator-aware StructureChain-per-element variant: identical to the overload above, except
  // the result vector is constructed from a caller-supplied allocator (B1/enable_if constrains
  // its value_type to StructureChain).
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
    uint32_t queueFamilyPropertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    // Wire each temporary head's pNext into its chain so the C call populates the whole chain.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    // Copy the filled head structures back into their chains.
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
  5092. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5093. template <typename Dispatch>
  5094. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
  5095. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5096. {
  5097. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5098. d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  5099. }
  5100. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5101. template <typename Dispatch>
  5102. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  5103. PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5104. {
  5105. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5106. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  5107. d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  5108. return memoryProperties;
  5109. }
  // StructureChain variant: populates a chain headed by PhysicalDeviceMemoryProperties2,
  // filling any pNext-linked extension structures in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
  5120. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5121. template <typename Dispatch>
  5122. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
  5123. uint32_t * pPropertyCount,
  5124. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
  5125. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5126. {
  5127. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5128. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
  5129. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
  5130. pPropertyCount,
  5131. reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  5132. }
  5133. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: returns the sparse-image format properties for formatInfo as a
  // std::vector, using the two-call Vulkan enumeration pattern (query count, then fetch data).
  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
    uint32_t propertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
  // Allocator-aware overload: same two-call enumeration as above, but the result vector is
  // constructed from a caller-supplied allocator (B1/enable_if constrains its value_type).
  template <typename SparseImageFormatProperties2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
    PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                     SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
    uint32_t propertyCount;  // written by the first (count-only) call below
    d.vkGetPhysicalDeviceSparseImageFormatProperties2(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
    properties.resize( propertyCount );
    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
                                                       reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                       &propertyCount,
                                                       reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return properties;
  }
  5181. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5182. template <typename Dispatch>
  5183. VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  5184. VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
  5185. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5186. {
  5187. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5188. d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  5189. }
  5190. template <typename Dispatch>
  5191. VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
  5192. VULKAN_HPP_NAMESPACE::Queue * pQueue,
  5193. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5194. {
  5195. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5196. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  5197. }
  5198. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5199. template <typename Dispatch>
  5200. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
  5201. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5202. {
  5203. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5204. VULKAN_HPP_NAMESPACE::Queue queue;
  5205. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
  5206. return queue;
  5207. }
  5208. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5209. template <typename Dispatch>
  5210. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  5211. Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
  5212. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5213. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
  5214. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5215. {
  5216. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5217. return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
  5218. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
  5219. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5220. reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  5221. }
  5222. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: creates a sampler Y'CbCr conversion and returns the handle by value.
  // `allocator` is optional; resultCheck converts a non-success VkResult into this namespace's
  // error-reporting mechanism before the handle is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
    Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VkResult result = d.vkCreateSamplerYcbcrConversion(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
  }
  5239. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: like createSamplerYcbcrConversion, but wraps the created handle in a
  // UniqueHandle whose deleter (ObjectDestroy) destroys it with the same device, allocator and
  // dispatcher when the UniqueHandle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
    Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
    VkResult result = d.vkCreateSamplerYcbcrConversion(
      m_device,
      reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  5258. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  5259. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5260. template <typename Dispatch>
  5261. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5262. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5263. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5264. {
  5265. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5266. d.vkDestroySamplerYcbcrConversion(
  5267. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5268. }
  5269. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: destroys a sampler Y'CbCr conversion; `allocator` is optional and is
  // unwrapped to the C allocation-callback pointer (nullptr when not provided).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5281. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5282. template <typename Dispatch>
  5283. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5284. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5285. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5286. {
  5287. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5288. d.vkDestroySamplerYcbcrConversion(
  5289. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5290. }
  5291. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Overloaded destroy(): same behavior as destroySamplerYcbcrConversion (enhanced-mode form);
  // the optional allocator is unwrapped to the C allocation-callback pointer (nullptr when absent).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroySamplerYcbcrConversion(
      m_device,
      static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5303. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5304. template <typename Dispatch>
  5305. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  5306. Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
  5307. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5308. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
  5309. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5310. {
  5311. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5312. return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
  5313. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
  5314. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5315. reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  5316. }
  5317. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: creates a descriptor update template and returns the handle by value.
  // `allocator` is optional; resultCheck converts a non-success VkResult into this namespace's
  // error-reporting mechanism before the handle is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult result = d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
  }
  5334. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same call as above, but wraps the handle in a
  // UniqueHandle whose deleter destroys it on this device with `allocator`.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                  Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult                                       result = d.vkCreateDescriptorUpdateTemplate(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
    // The deleter keeps the same allocator so creation and destruction pair up.
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
                                    descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  5353. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  5354. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys the template using the given allocation callbacks
  // (pAllocator may be null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  5364. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional allocator converts to a null VkAllocationCallbacks
  // pointer when no allocator was supplied.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate            descriptorUpdateTemplate,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5376. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for DescriptorUpdateTemplate; forwards to the same
  // entry point as destroyDescriptorUpdateTemplate (used by UniqueHandle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate    descriptorUpdateTemplate,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  5386. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced destroy() overload for DescriptorUpdateTemplate with an Optional
  // allocator (null callbacks when empty).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate            descriptorUpdateTemplate,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDescriptorUpdateTemplate(
      m_device,
      static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5398. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: updates `descriptorSet` through the template, passing the
  // caller's raw data blob straight through to the driver.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                                                  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                  const void *                                   pData,
                                                                  Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUpdateDescriptorSetWithTemplate(
      m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  }
  5409. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes any DataType by reference and passes its address as
  // the opaque data blob. The caller is responsible for DataType matching the
  // layout the update template expects — NOTE(review): nothing here checks that.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet            descriptorSet,
                                                                  VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                  DataType const &                               data,
                                                                  Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkUpdateDescriptorSetWithTemplate( m_device,
                                         static_cast<VkDescriptorSet>( descriptorSet ),
                                         static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                         reinterpret_cast<const void *>( &data ) );
  }
  5422. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: queries external-memory capabilities for the buffer
  // description in *pExternalBufferInfo into *pExternalBufferProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                                      VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
                                                   reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
  5433. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
    PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
                                                 Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
                                                   reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
  5446. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: queries external-fence capabilities for *pExternalFenceInfo.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                                     VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
                                                                     Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
                                                  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
                                                  reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }
  5457. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the fence properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
    PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
                                                  reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
                                                  reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
  5470. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: queries external-semaphore capabilities for
  // *pExternalSemaphoreInfo into *pExternalSemaphoreProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
                                                    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
                                                    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
                                                      reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
  5482. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the semaphore properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
    PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
                                                    Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
                                                      reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
  5495. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: asks whether the descriptor set layout described by
  // *pCreateInfo can be created, writing the answer into *pSupport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                                                VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *          pSupport,
                                                                Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }
  5505. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the support struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }
  // StructureChain overload: fills the DescriptorSetLayoutSupport link of a
  // caller-chosen structure chain so extension structs chained to it get
  // populated by the same query.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                           Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // Write through the chain's own support element so its pNext stays linked.
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupport(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
  5529. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5530. //=== VK_VERSION_1_2 ===
  // Records an indirect draw whose actual draw count is read from `countBuffer`
  // at `countBufferOffset`, clamped by `maxDrawCount`; commands are `stride`
  // bytes apart in `buffer` starting at `offset`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                           VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                           VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                           uint32_t                         maxDrawCount,
                                                           uint32_t                         stride,
                                                           Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirectCount( m_commandBuffer,
                              static_cast<VkBuffer>( buffer ),
                              static_cast<VkDeviceSize>( offset ),
                              static_cast<VkBuffer>( countBuffer ),
                              static_cast<VkDeviceSize>( countBufferOffset ),
                              maxDrawCount,
                              stride );
  }
  // Indexed counterpart of drawIndirectCount: same count-buffer mechanism, but
  // records indexed draw commands.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                  VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                  uint32_t                         maxDrawCount,
                                                                  uint32_t                         stride,
                                                                  Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
                                     static_cast<VkBuffer>( buffer ),
                                     static_cast<VkDeviceSize>( offset ),
                                     static_cast<VkBuffer>( countBuffer ),
                                     static_cast<VkDeviceSize>( countBufferOffset ),
                                     maxDrawCount,
                                     stride );
  }
  // C-pointer overload: creates a render pass from the v2 create-info and writes
  // the handle through pRenderPass; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                                                           Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRenderPass2( m_device,
                                                       reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }
  5579. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the new RenderPass by value after validating the
  // result code; `allocator` converts to a null pointer when empty.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &       createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VkResult                         result =
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  }
  5596. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: as createRenderPass2, but wraps the handle in a
  // UniqueHandle whose deleter destroys it on this device with `allocator`.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 &       createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VkResult                         result =
      d.vkCreateRenderPass2( m_device,
                             reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  5615. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  5616. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: records the start of a render pass instance together with
  // the first subpass's begin info.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                                          const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }
  5626. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of beginRenderPass2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
                                                          const VULKAN_HPP_NAMESPACE::SubpassBeginInfo &    subpassBeginInfo,
                                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
  5636. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: records the transition to the next subpass, ending the
  // current one with *pSubpassEndInfo and beginning the next with *pSubpassBeginInfo.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                                                      Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass2(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }
  5646. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of nextSubpass2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
                                                      const VULKAN_HPP_NAMESPACE::SubpassEndInfo &   subpassEndInfo,
                                                      Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass2(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
  5656. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5657. template <typename Dispatch>
  5658. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
  5659. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5660. {
  5661. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5662. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  5663. }
  5664. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5665. template <typename Dispatch>
  5666. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
  5667. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5668. {
  5669. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5670. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  5671. }
  5672. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5673. template <typename Dispatch>
  5674. VULKAN_HPP_INLINE void
  5675. Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5676. {
  5677. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5678. d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  5679. }
  5680. template <typename Dispatch>
  5681. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  5682. uint64_t * pValue,
  5683. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5684. {
  5685. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5686. return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  5687. }
  5688. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the counter value by value after validating the
  // result code.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                                                                                    Dispatch const &                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t value;
    VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  }
  5699. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5700. template <typename Dispatch>
  5701. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
  5702. uint64_t timeout,
  5703. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5704. {
  5705. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5706. return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  5707. }
  5708. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5709. template <typename Dispatch>
  5710. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  5711. Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  5712. {
  5713. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5714. VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
  5715. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5716. VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
  5717. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  5718. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  5719. }
  5720. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5721. template <typename Dispatch>
  5722. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
  5723. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5724. {
  5725. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5726. return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  5727. }
  5728. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5729. template <typename Dispatch>
  5730. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  5731. Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  5732. {
  5733. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5734. VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
  5735. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
  5736. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  5737. }
  5738. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5739. template <typename Dispatch>
  5740. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  5741. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5742. {
  5743. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5744. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  5745. }
  5746. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5747. template <typename Dispatch>
  5748. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  5749. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5750. {
  5751. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5752. VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  5753. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  5754. }
  5755. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5756. template <typename Dispatch>
  5757. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  5758. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5759. {
  5760. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5761. return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  5762. }
  5763. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5764. template <typename Dispatch>
  5765. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  5766. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5767. {
  5768. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5769. uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  5770. return result;
  5771. }
  5772. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5773. template <typename Dispatch>
  5774. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
  5775. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5776. {
  5777. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5778. return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  5779. }
  5780. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5781. template <typename Dispatch>
  5782. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
  5783. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5784. {
  5785. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5786. uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  5787. return result;
  5788. }
  5789. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5790. //=== VK_VERSION_1_3 ===
  5791. template <typename Dispatch>
  // C-pointer overload: standard Vulkan enumerate pattern — with pToolProperties
  // null the driver writes the count into *pToolCount; otherwise it fills up to
  // *pToolCount entries.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t *                                          pToolCount,
                                                                                   VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
                                                                                   Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
  }
  5800. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: enumerates all active tools into a vector. Uses the
  // two-call pattern (count query, then fill) and loops while the driver reports
  // VK_INCOMPLETE, i.e. the tool set changed between the two calls.
  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
    PhysicalDevice::getToolProperties( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
    uint32_t                                                                                               toolCount;
    VkResult                                                                                               result;
    do
    {
      // First call: nullptr data pointer, so only the count is written.
      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
      if ( ( result == VK_SUCCESS ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result =
          d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
    // Shrink if the driver returned fewer entries than the count query promised.
    if ( toolCount < toolProperties.size() )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  }
  // Allocator-aware variant of the enhanced overload: identical enumerate loop,
  // but the result vector is constructed with the caller-supplied allocator.
  // The enable_if on B1 restricts this overload to matching allocator types.
  template <typename PhysicalDeviceToolPropertiesAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
    PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
      physicalDeviceToolPropertiesAllocator );
    uint32_t toolCount;
    VkResult result;
    do
    {
      // First call: nullptr data pointer, so only the count is written.
      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
      if ( ( result == VK_SUCCESS ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result =
          d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
    // Shrink if the driver returned fewer entries than the count query promised.
    if ( toolCount < toolProperties.size() )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  }
  5859. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style variant of vkCreatePrivateDataSlot: raw pointers in/out, raw Result back,
  // no exceptions. The reinterpret_casts bridge the C++ wrapper types to the C API types.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
                                                           reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
  }
  5872. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: takes a reference and an Optional allocator, funnels the
  // VkResult through resultCheck (which may throw, per library configuration) and
  // returns the created PrivateDataSlot handle on success.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
  Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    // Optional<> converts to a possibly-null AllocationCallbacks pointer for the C API.
    VkResult result =
      d.vkCreatePrivateDataSlot( m_device,
                                 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
  }
  5889. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same creation path as createPrivateDataSlot, but wraps the
  // result in a UniqueHandle whose ObjectDestroy deleter destroys the slot with the
  // same device, allocator and dispatcher when the handle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
  Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    VkResult result =
      d.vkCreatePrivateDataSlot( m_device,
                                 reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  5908. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  5909. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys a private-data slot; the host allocator callbacks pointer is forwarded
  // unchanged to the C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  5918. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroy: the Optional allocator is unwrapped to a possibly-null
  // AllocationCallbacks pointer before the dispatch call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlot(
      m_device,
      static_cast<VkPrivateDataSlot>( privateDataSlot ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5930. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Overload participating in the generic Device::destroy interface; same body as the
  // pointer variant of destroyPrivateDataSlot.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  5939. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic-destroy overload, enhanced mode: unwraps the Optional allocator and
  // forwards to vkDestroyPrivateDataSlot.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlot(
      m_device,
      static_cast<VkPrivateDataSlot>( privateDataSlot ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  5951. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5952. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced variant (compiled when enhanced mode is disabled): stores a uint64_t
  // payload for (objectType, objectHandle) in the given slot, returning the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
                                                                        uint64_t objectHandle,
                                                                        VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                        uint64_t data,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
  }
  5964. #else
  // Enhanced-mode variant: same call, but the result is routed through resultCheck
  // (may throw, per library configuration) and mapped to ResultValueType<void>.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
                                                                                 uint64_t objectHandle,
                                                                                 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                                 uint64_t data,
                                                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result =
      d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  5978. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Reads the uint64_t payload previously stored for (objectType, objectHandle) in the
  // given slot and writes it to *pData.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
                                                 uint64_t objectHandle,
                                                 VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                 uint64_t * pData,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
  }
  5989. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the stored payload by value instead of through an
  // out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType,
                                                                          uint64_t objectHandle,
                                                                          VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t data;
    d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
    return data;
  }
  6001. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a synchronization2 set-event command; pointer variant of setEvent2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
                                                   const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }
  6010. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of setEvent2: takes the DependencyInfo by const reference and
  // passes its address to the C entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
                                                   const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
  6019. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a synchronization2 reset-event command for the given stage mask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
                                                     VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  }
  // Pointer variant of waitEvents2: caller guarantees pEvents and pDependencyInfos
  // each point to eventCount elements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount,
                                                     const VULKAN_HPP_NAMESPACE::Event * pEvents,
                                                     const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWaitEvents2(
      m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
  }
  6038. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of waitEvents2: validates that each event has a matching
  // DependencyInfo. With exceptions disabled the mismatch is an assert; otherwise it
  // throws LogicError — hence the conditional noexcept.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
                                                     ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
# else
    if ( events.size() != dependencyInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdWaitEvents2( m_commandBuffer,
                        events.size(),
                        reinterpret_cast<const VkEvent *>( events.data() ),
                        reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  }
  6058. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a synchronization2 pipeline barrier; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }
  6066. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of pipelineBarrier2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
  6074. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a synchronization2 timestamp write into queryPool[query] at the given stage.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
                                                         VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                         uint32_t query,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  }
  // Pointer variant of vkQueueSubmit2: submits submitCount batches, optionally
  // signalling fence; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount,
                                                                const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
                                                                VULKAN_HPP_NAMESPACE::Fence fence,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }
  6093. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode submit2: takes the batches as an ArrayProxy and routes the result
  // through resultCheck (may throw, per library configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Queue::submit2( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  6103. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 buffer-to-buffer copy; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
  }
  6111. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of copyBuffer2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  }
  6119. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 image-to-image copy; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
  }
  6126. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of copyImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  }
  6133. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 buffer-to-image copy; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
  }
  6141. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of copyBufferToImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  }
  6149. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 image-to-buffer copy; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
  }
  6157. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of copyImageToBuffer2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  }
  6165. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 image blit; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
  }
  6172. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of blitImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  }
  6179. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a copy-commands2 multisample resolve; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
  }
  6187. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of resolveImage2.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  }
  6195. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Begins a dynamic-rendering render pass instance; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
  }
  6203. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference variant of beginRendering.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  }
  6211. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Ends the current dynamic-rendering render pass instance.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRendering( m_commandBuffer );
  }
  // Dynamic state: sets the cull mode for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }
  // Dynamic state: sets the front-face winding order for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }
  // Dynamic state: sets the primitive topology for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }
  // Dynamic state: sets viewportCount viewports from a raw array; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount,
                                                              const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }
  6245. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: the viewport count is taken from the proxy's size.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
  6253. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Dynamic state: sets scissorCount scissor rectangles from a raw array; pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
  CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }
  6261. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant: the scissor count is taken from the proxy's size.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
  6269. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Pointer variant of bindVertexBuffers2: binds bindingCount vertex buffers starting
  // at firstBinding; pSizes / pStrides are forwarded as-is (the C API permits null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
                                                            uint32_t bindingCount,
                                                            const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                            const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindVertexBuffers2( m_commandBuffer,
                               firstBinding,
                               bindingCount,
                               reinterpret_cast<const VkBuffer *>( pBuffers ),
                               reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                               reinterpret_cast<const VkDeviceSize *>( pSizes ),
                               reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  }
  6288. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy variant of bindVertexBuffers2. offsets must match buffers in size;
  // sizes and strides are optional — an empty proxy yields a null pointer via .data().
  // Mismatches assert when exceptions are disabled, otherwise throw LogicError,
  // hence the conditional noexcept.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
                                                            ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                            ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                            ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                            ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
# else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBindVertexBuffers2( m_commandBuffer,
                               firstBinding,
                               buffers.size(),
                               reinterpret_cast<const VkBuffer *>( buffers.data() ),
                               reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                               reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                               reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
  6324. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Dynamic state: enables/disables the depth test.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  }
  // Dynamic state: enables/disables depth writes.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  }
  // Dynamic state: sets the depth comparison operator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  }
  // Dynamic state: enables/disables the depth-bounds test.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  }
  // Dynamic state: enables/disables the stencil test.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  }
  // Dynamic state: sets the stencil fail/pass/depth-fail operations and the compare op
  // for the faces selected by faceMask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                      VULKAN_HPP_NAMESPACE::StencilOp failOp,
                                                      VULKAN_HPP_NAMESPACE::StencilOp passOp,
                                                      VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
                                                      VULKAN_HPP_NAMESPACE::CompareOp compareOp,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilOp( m_commandBuffer,
                         static_cast<VkStencilFaceFlags>( faceMask ),
                         static_cast<VkStencilOp>( failOp ),
                         static_cast<VkStencilOp>( passOp ),
                         static_cast<VkStencilOp>( depthFailOp ),
                         static_cast<VkCompareOp>( compareOp ) );
  }
  // Dynamic state: enables/disables rasterizer discard.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }
  6379. template <typename Dispatch>
  6380. VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6381. {
  6382. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6383. d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  6384. }
  6385. template <typename Dispatch>
  6386. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
  6387. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6388. {
  6389. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6390. d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  6391. }
  6392. template <typename Dispatch>
  6393. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
  6394. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  6395. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6396. {
  6397. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6398. d.vkGetDeviceBufferMemoryRequirements(
  6399. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  6400. }
  6401. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Enhanced-mode overload: returns the MemoryRequirements2 by value instead of filling an out-pointer.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    // The C++ structs are layout-compatible with their C counterparts, so reinterpret_cast is safe here.
    d.vkGetDeviceBufferMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // StructureChain overload: lets the caller query extension structs chained behind MemoryRequirements2.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // Write directly into the MemoryRequirements2 element of the chain; its pNext links stay intact.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
  6423. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6424. template <typename Dispatch>
  6425. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
  6426. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  6427. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6428. {
  6429. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6430. d.vkGetDeviceImageMemoryRequirements(
  6431. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  6432. }
  6433. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Enhanced-mode overload: returns the MemoryRequirements2 by value instead of filling an out-pointer.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    // The C++ structs are layout-compatible with their C counterparts, so reinterpret_cast is safe here.
    d.vkGetDeviceImageMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // StructureChain overload: lets the caller query extension structs chained behind MemoryRequirements2.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // Write directly into the MemoryRequirements2 element of the chain; its pNext links stay intact.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
  6455. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6456. template <typename Dispatch>
  6457. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
  6458. uint32_t * pSparseMemoryRequirementCount,
  6459. VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
  6460. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6461. {
  6462. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6463. d.vkGetDeviceImageSparseMemoryRequirements( m_device,
  6464. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
  6465. pSparseMemoryRequirementCount,
  6466. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  6467. }
  6468. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    // Enhanced-mode overload: returns all sparse memory requirements in a std::vector.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    // First call with nullptr data: query only the element count.
    d.vkGetDeviceImageSparseMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call fills the vector; this entry point returns void, so no VK_INCOMPLETE retry loop is needed.
    d.vkGetDeviceImageSparseMemoryRequirements( m_device,
                                                reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                &sparseMemoryRequirementCount,
                                                reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
                                              SparseImageMemoryRequirements2Allocator &                   sparseImageMemoryRequirements2Allocator,
                                              Dispatch const & d ) const
  {
    // Same as the overload above, but the result vector is constructed with a caller-supplied allocator.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    // First call with nullptr data: query only the element count.
    d.vkGetDeviceImageSparseMemoryRequirements(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call fills the vector; this entry point returns void, so no VK_INCOMPLETE retry loop is needed.
    d.vkGetDeviceImageSparseMemoryRequirements( m_device,
                                                reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                &sparseMemoryRequirementCount,
                                                reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
  6517. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6518. //=== VK_KHR_surface ===
  6519. template <typename Dispatch>
  6520. VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6521. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6522. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6523. {
  6524. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6525. d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6526. }
  6527. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6528. template <typename Dispatch>
  6529. VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6530. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6531. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6532. {
  6533. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6534. d.vkDestroySurfaceKHR( m_instance,
  6535. static_cast<VkSurfaceKHR>( surface ),
  6536. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6537. }
  6538. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6539. template <typename Dispatch>
  6540. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6541. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6542. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6543. {
  6544. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6545. d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6546. }
  6547. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6548. template <typename Dispatch>
  6549. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6550. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6551. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6552. {
  6553. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6554. d.vkDestroySurfaceKHR( m_instance,
  6555. static_cast<VkSurfaceKHR>( surface ),
  6556. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6557. }
  6558. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6559. template <typename Dispatch>
  6560. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
  6561. VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6562. VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
  6563. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6564. {
  6565. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6566. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
  6567. m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
  6568. }
  6569. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
    PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    // Enhanced-mode overload: returns the support flag by value; resultCheck throws (or asserts,
    // depending on configuration) on failure codes.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Bool32 supported;
    VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR(
      m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
  }
  6581. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6582. template <typename Dispatch>
  6583. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6584. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
  6585. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6586. {
  6587. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6588. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  6589. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  6590. }
  6591. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
    PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    // Enhanced-mode overload: returns the capabilities by value; resultCheck handles failure codes.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
      m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }
  6603. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6604. template <typename Dispatch>
  6605. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6606. uint32_t * pSurfaceFormatCount,
  6607. VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
  6608. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6609. {
  6610. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6611. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
  6612. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
  6613. }
  6614. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    // Enhanced-mode overload: two-call enumeration. Query the count, resize, fetch the data, and
    // retry while the driver reports VK_INCOMPLETE (the count can change between the two calls).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
    uint32_t                                                                       surfaceFormatCount;
    VkResult                                                                       result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }
  template <typename SurfaceFormatKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
    PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                          SurfaceFormatKHRAllocator &      surfaceFormatKHRAllocator,
                                          Dispatch const & d ) const
  {
    // Same two-call enumeration as above, but the result vector uses a caller-supplied allocator.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
    uint32_t                                                                       surfaceFormatCount;
    VkResult                                                                       result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry while the surface-format count changed mid-enumeration
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }
  6672. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6673. template <typename Dispatch>
  6674. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6675. uint32_t * pPresentModeCount,
  6676. VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
  6677. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6678. {
  6679. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6680. return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
  6681. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  6682. }
  6683. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    // Enhanced-mode overload: two-call enumeration with a VK_INCOMPLETE retry loop (the number of
    // present modes can change between the count query and the data fetch).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t                                                                   presentModeCount;
    VkResult                                                                   result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }
  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                               PresentModeKHRAllocator &        presentModeKHRAllocator,
                                               Dispatch const & d ) const
  {
    // Same two-call enumeration as above, but the result vector uses a caller-supplied allocator.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t                                                                   presentModeCount;
    VkResult                                                                   result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry while the present-mode count changed mid-enumeration
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }
  6741. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6742. //=== VK_KHR_swapchain ===
  6743. template <typename Dispatch>
  6744. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
  6745. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6746. VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
  6747. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6748. {
  6749. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6750. return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
  6751. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
  6752. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  6753. reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
  6754. }
  6755. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const & d ) const
  {
    // Enhanced-mode overload: returns the created swapchain handle by value; resultCheck handles
    // failure codes. The Optional allocator converts to nullptr when no callbacks were supplied.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result =
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
  }
  6772. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
  {
    // Smart-handle variant: wraps the created swapchain in a UniqueHandle whose deleter destroys it
    // with this device and the same allocator when the handle goes out of scope.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result =
      d.vkCreateSwapchainKHR( m_device,
                              reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  6791. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  6792. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6793. template <typename Dispatch>
  6794. VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6795. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6796. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6797. {
  6798. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6799. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6800. }
  6801. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6802. template <typename Dispatch>
  6803. VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6804. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6805. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6806. {
  6807. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6808. d.vkDestroySwapchainKHR( m_device,
  6809. static_cast<VkSwapchainKHR>( swapchain ),
  6810. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6811. }
  6812. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6813. template <typename Dispatch>
  6814. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6815. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6816. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6817. {
  6818. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6819. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6820. }
  6821. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6822. template <typename Dispatch>
  6823. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6824. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6825. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6826. {
  6827. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6828. d.vkDestroySwapchainKHR( m_device,
  6829. static_cast<VkSwapchainKHR>( swapchain ),
  6830. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6831. }
  6832. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6833. template <typename Dispatch>
  6834. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6835. uint32_t * pSwapchainImageCount,
  6836. VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
  6837. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6838. {
  6839. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6840. return static_cast<Result>(
  6841. d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
  6842. }
  6843. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename ImageAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    // Enhanced-mode overload: two-call enumeration with a VK_INCOMPLETE retry loop (the image count
    // can change between the count query and the data fetch).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
    uint32_t                                                 swapchainImageCount;
    VkResult                                                 result;
    do
    {
      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result = d.vkGetSwapchainImagesKHR(
          m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( swapchainImageCount < swapchainImages.size() )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
  }
  template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
    Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
  {
    // Same two-call enumeration as above, but the result vector uses a caller-supplied allocator.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
    uint32_t                                                 swapchainImageCount;
    VkResult                                                 result;
    do
    {
      result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
      if ( ( result == VK_SUCCESS ) && swapchainImageCount )
      {
        swapchainImages.resize( swapchainImageCount );
        result = d.vkGetSwapchainImagesKHR(
          m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry while the image count changed mid-enumeration
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
    VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
    // Shrink if the second call returned fewer elements than were allocated.
    if ( swapchainImageCount < swapchainImages.size() )
    {
      swapchainImages.resize( swapchainImageCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
  }
  6896. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6897. template <typename Dispatch>
  6898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6899. uint64_t timeout,
  6900. VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  6901. VULKAN_HPP_NAMESPACE::Fence fence,
  6902. uint32_t * pImageIndex,
  6903. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6904. {
  6905. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6906. return static_cast<Result>( d.vkAcquireNextImageKHR(
  6907. m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
  6908. }
  6909. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6910. template <typename Dispatch>
  6911. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  6912. uint64_t timeout,
  6913. VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  6914. VULKAN_HPP_NAMESPACE::Fence fence,
  6915. Dispatch const & d ) const
  6916. {
  6917. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6918. uint32_t imageIndex;
  6919. VkResult result = d.vkAcquireNextImageKHR(
  6920. m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
  6921. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  6922. VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
  6923. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  6924. VULKAN_HPP_NAMESPACE::Result::eTimeout,
  6925. VULKAN_HPP_NAMESPACE::Result::eNotReady,
  6926. VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  6927. return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
  6928. }
  6929. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6930. template <typename Dispatch>
  6931. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
  6932. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6933. {
  6934. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6935. return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
  6936. }
  6937. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6938. template <typename Dispatch>
  6939. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
  6940. Dispatch const & d ) const
  6941. {
  6942. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6943. VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
  6944. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  6945. VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
  6946. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  6947. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  6948. }
  6949. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6950. template <typename Dispatch>
  6951. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
  6952. VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6953. {
  6954. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6955. return static_cast<Result>(
  6956. d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
  6957. }
  6958. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6959. template <typename Dispatch>
  6960. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
  6961. Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  6962. {
  6963. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6964. VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
  6965. VkResult result =
  6966. d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
  6967. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  6968. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
  6969. }
  6970. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6971. template <typename Dispatch>
  6972. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6973. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
  6974. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6975. {
  6976. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6977. return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
  6978. m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  6979. }
  6980. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6981. template <typename Dispatch>
  6982. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
  6983. Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6984. {
  6985. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6986. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  6987. VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR(
  6988. m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
  6989. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
  6990. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  6991. }
  6992. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  6993. template <typename Dispatch>
  6994. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6995. uint32_t * pRectCount,
  6996. VULKAN_HPP_NAMESPACE::Rect2D * pRects,
  6997. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6998. {
  6999. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7000. return static_cast<Result>(
  7001. d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
  7002. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper for vkGetPhysicalDevicePresentRectanglesKHR: returns all
  // present rectangles for the surface via the Vulkan two-call enumeration idiom
  // (query count, then fill), retrying while VK_INCOMPLETE is reported.
  template <typename Rect2DAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
    PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
    uint32_t rectCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of rectangles only
      result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
      if ( ( result == VK_SUCCESS ) && rectCount )
      {
        // second call: fill the vector sized to the reported count
        rects.resize( rectCount );
        result = d.vkGetPhysicalDevicePresentRectanglesKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( rectCount < rects.size() )
    {
      rects.resize( rectCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
  }
  // Allocator overload of the enhanced getPresentRectanglesKHR: identical two-call
  // enumeration, with the result vector constructed from a caller-supplied
  // allocator. B1 is SFINAE-constrained to allocators whose value_type is Rect2D.
  template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
    PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
    uint32_t rectCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of rectangles only
      result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
      if ( ( result == VK_SUCCESS ) && rectCount )
      {
        // second call: fill the vector sized to the reported count
        rects.resize( rectCount );
        result = d.vkGetPhysicalDevicePresentRectanglesKHR(
          m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
    VULKAN_HPP_ASSERT( rectCount <= rects.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( rectCount < rects.size() )
    {
      rects.resize( rectCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7057. template <typename Dispatch>
  7058. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
  7059. uint32_t * pImageIndex,
  7060. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7061. {
  7062. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7063. return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
  7064. }
  7065. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7066. template <typename Dispatch>
  7067. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
  7068. Dispatch const & d ) const
  7069. {
  7070. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7071. uint32_t imageIndex;
  7072. VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
  7073. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  7074. VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
  7075. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  7076. VULKAN_HPP_NAMESPACE::Result::eTimeout,
  7077. VULKAN_HPP_NAMESPACE::Result::eNotReady,
  7078. VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  7079. return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
  7080. }
  7081. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7082. //=== VK_KHR_display ===
  7083. template <typename Dispatch>
  7084. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
  7085. VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
  7086. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7087. {
  7088. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7089. return static_cast<Result>(
  7090. d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
  7091. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper for vkGetPhysicalDeviceDisplayPropertiesKHR: returns all
  // display properties via the Vulkan two-call enumeration idiom (query count, then
  // fill), retrying while VK_INCOMPLETE is reported.
  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // Allocator overload of the enhanced getDisplayPropertiesKHR: identical two-call
  // enumeration, with the result vector constructed from a caller-supplied
  // allocator. B1 is SFINAE-constrained to allocators of DisplayPropertiesKHR.
  template <typename DisplayPropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7147. template <typename Dispatch>
  7148. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
  7149. VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
  7150. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7151. {
  7152. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7153. return static_cast<Result>(
  7154. d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
  7155. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper for vkGetPhysicalDeviceDisplayPlanePropertiesKHR: returns all
  // display-plane properties via the Vulkan two-call enumeration idiom (query
  // count, then fill), retrying while VK_INCOMPLETE is reported.
  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // Allocator overload of the enhanced getDisplayPlanePropertiesKHR: identical
  // two-call enumeration, with the result vector constructed from a caller-supplied
  // allocator. B1 is SFINAE-constrained to allocators of DisplayPlanePropertiesKHR.
  template <typename DisplayPlanePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7215. template <typename Dispatch>
  7216. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
  7217. uint32_t * pDisplayCount,
  7218. VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
  7219. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7220. {
  7221. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7222. return static_cast<Result>(
  7223. d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
  7224. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper for vkGetDisplayPlaneSupportedDisplaysKHR: returns all
  // displays supported by the given plane via the Vulkan two-call enumeration
  // idiom (query count, then fill), retrying while VK_INCOMPLETE is reported.
  template <typename DisplayKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
    uint32_t displayCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of displays only
      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
      if ( ( result == VK_SUCCESS ) && displayCount )
      {
        // second call: fill the vector sized to the reported count
        displays.resize( displayCount );
        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( displayCount < displays.size() )
    {
      displays.resize( displayCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
  }
  // Allocator overload of the enhanced getDisplayPlaneSupportedDisplaysKHR:
  // identical two-call enumeration, with the result vector constructed from a
  // caller-supplied allocator. B1 is SFINAE-constrained to allocators of DisplayKHR.
  template <typename DisplayKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
    uint32_t displayCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of displays only
      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
      if ( ( result == VK_SUCCESS ) && displayCount )
      {
        // second call: fill the vector sized to the reported count
        displays.resize( displayCount );
        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( displayCount < displays.size() )
    {
      displays.resize( displayCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7280. template <typename Dispatch>
  7281. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7282. uint32_t * pPropertyCount,
  7283. VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
  7284. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7285. {
  7286. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7287. return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
  7288. m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
  7289. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper for vkGetDisplayModePropertiesKHR: returns all mode properties
  // of the given display via the Vulkan two-call enumeration idiom (query count,
  // then fill), retrying while VK_INCOMPLETE is reported.
  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetDisplayModePropertiesKHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // Allocator overload of the enhanced getDisplayModePropertiesKHR: identical
  // two-call enumeration, with the result vector constructed from a caller-supplied
  // allocator. B1 is SFINAE-constrained to allocators of DisplayModePropertiesKHR.
  template <typename DisplayModePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
    uint32_t propertyCount;  // written by the first query call below
    VkResult result;
    do
    {
      // first call: query the number of properties only
      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        // second call: fill the vector sized to the reported count
        properties.resize( propertyCount );
        result = d.vkGetDisplayModePropertiesKHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // fewer elements were written than allocated: shrink to fit
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7351. template <typename Dispatch>
  7352. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7353. const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
  7354. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7355. VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
  7356. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7357. {
  7358. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7359. return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
  7360. static_cast<VkDisplayKHR>( display ),
  7361. reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
  7362. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7363. reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
  7364. }
  7365. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7366. template <typename Dispatch>
  7367. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
  7368. PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7369. const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
  7370. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  7371. Dispatch const & d ) const
  7372. {
  7373. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7374. VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  7375. VkResult result =
  7376. d.vkCreateDisplayModeKHR( m_physicalDevice,
  7377. static_cast<VkDisplayKHR>( display ),
  7378. reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
  7379. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7380. reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
  7381. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
  7382. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
  7383. }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant of createDisplayModeKHR: on success the new DisplayModeKHR
  // is wrapped in a UniqueHandle whose deleter is an ObjectDestroy bound to this
  // PhysicalDevice, the supplied allocator, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
    PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
    VkResult result =
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                                // Optional allocator unwraps to nullptr when absent
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7407. template <typename Dispatch>
  7408. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  7409. PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
  7410. uint32_t planeIndex,
  7411. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
  7412. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7413. {
  7414. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7415. return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
  7416. m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
  7417. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the plane capabilities by value; the VkResult is
  // routed through resultCheck (throws when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
    PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
    VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR(
      m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7431. template <typename Dispatch>
  7432. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
  7433. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7434. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  7435. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7436. {
  7437. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7438. return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
  7439. reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
  7440. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7441. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  7442. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the created surface by value; failures are routed
  // through resultCheck (throws when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: as above, but the surface is wrapped in a UniqueHandle whose
  // deleter captures this Instance, the optional allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  7481. //=== VK_KHR_display_swapchain ===
  7482. template <typename Dispatch>
  7483. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
  7484. const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
  7485. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7486. VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
  7487. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7488. {
  7489. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7490. return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
  7491. swapchainCount,
  7492. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
  7493. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7494. reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
  7495. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: creates one swapchain per element of createInfos and returns
  // them in a vector; the VkResult is routed through resultCheck.
  template <typename SwapchainKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>              allocator,
                                       Dispatch const &                                                       d ) const
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // One output handle per create-info entry.
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
  }

  // Overload additionally taking a caller-supplied allocator for the returned vector
  // (B0 is SFINAE-constrained to allocators of SwapchainKHR).
  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>              allocator,
                                       SwapchainKHRAllocator &                                                swapchainKHRAllocator,
                                       Dispatch const &                                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
  }

  // Singular convenience variant: creates exactly one shared swapchain (count hard-wired
  // to 1) and returns it by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
    Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: creates the swapchains into a temporary vector of raw handles,
  // then wraps each one in a UniqueHandle sharing a single ObjectDestroy deleter.
  template <typename Dispatch, typename SwapchainKHRAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>              allocator,
                                             Dispatch const &                                                       d ) const
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
    // reserve() up front so the UniqueHandles are move-constructed exactly once.
    uniqueSwapchains.reserve( createInfos.size() );
    // All swapchains share the same device / allocator / dispatcher for destruction.
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
  }

  // Overload additionally taking a caller-supplied allocator for the returned vector
  // (B0 is SFINAE-constrained to allocators of UniqueHandle<SwapchainKHR, Dispatch>).
  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>              allocator,
                                             SwapchainKHRAllocator &                                                swapchainKHRAllocator,
                                             Dispatch const &                                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
    uniqueSwapchains.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
  }

  // Singular smart-handle variant: creates exactly one shared swapchain (count
  // hard-wired to 1) and returns it wrapped in a UniqueHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR &      createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_XLIB_KHR )
  //=== VK_KHR_xlib_surface ===

  // C-style overload: creates an Xlib surface into *pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *      pAllocator,
                                                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                     pSurface,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
                                                          reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the surface by value; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR &    createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: surface is wrapped in a UniqueHandle whose deleter captures
  // this Instance, the optional allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR &    createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateXlibSurfaceKHR( m_instance,
                                reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Queries whether the queue family can present to Xlib surfaces on the given
  // display/visual; pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32
    PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: takes the X Display by reference (never null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#if defined( VK_USE_PLATFORM_XCB_KHR )
  //=== VK_KHR_xcb_surface ===

  // C-style overload: creates an XCB surface into *pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *     pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::SurfaceKHR *                    pSurface,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
                                                         reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the surface by value; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR &     createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: surface is wrapped in a UniqueHandle whose deleter captures
  // this Instance, the optional allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR &     createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateXcbSurfaceKHR( m_instance,
                               reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Queries whether the queue family can present to XCB surfaces on the given
  // connection/visual; pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                         xcb_connection_t * connection,
                                                                         xcb_visualid_t     visual_id,
                                                                         Dispatch const &   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: takes the XCB connection by reference (never null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t           queueFamilyIndex,
                                                                                               xcb_connection_t & connection,
                                                                                               xcb_visualid_t     visual_id,
                                                                                               Dispatch const &   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  //=== VK_KHR_wayland_surface ===

  // C-style overload: creates a Wayland surface into *pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the surface by value; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: surface is wrapped in a UniqueHandle whose deleter captures
  // this Instance, the optional allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateWaylandSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Queries whether the queue family can present to Wayland surfaces on the given
  // display; pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t            queueFamilyIndex,
                                                                             struct wl_display * display,
                                                                             Dispatch const &    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: takes the Wayland display by reference (never null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#if defined( VK_USE_PLATFORM_ANDROID_KHR )
  //=== VK_KHR_android_surface ===

  // C-style overload: creates an Android surface into *pSurface and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
                                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks *         pAllocator,
                                                                                   VULKAN_HPP_NAMESPACE::SurfaceKHR *                        pSurface,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must have been initialized against the same Vulkan header version.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
                                                             reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                             reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                             reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the surface by value; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: surface is wrapped in a UniqueHandle whose deleter captures
  // this Instance, the optional allocation callbacks, and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateAndroidSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  7896. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  7897. //=== VK_KHR_win32_surface ===
// Thin pointer-based wrapper: forwards directly to vkCreateWin32SurfaceKHR and returns the raw
// Result; no checking or exception translation happens at this level.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: reference create-info, optional allocator; failure is routed through
// resultCheck() and the surface is returned via createResultValueType().
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VkResult result =
d.vkCreateWin32SurfaceKHR( m_instance,
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As above, but the surface is returned as a UniqueHandle destroyed through this Instance.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
VkResult result =
d.vkCreateWin32SurfaceKHR( m_instance,
reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSurfaceKHR *>( &surface ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Wraps vkGetPhysicalDeviceWin32PresentationSupportKHR: queries whether the given queue family
// of this physical device can present to Win32 surfaces; returns the result as a Bool32.
template <typename Dispatch>
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
}
  7954. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  7955. //=== VK_EXT_debug_report ===
// Pointer-based wrapper: forwards to vkCreateDebugReportCallbackEXT unchecked and returns the
// raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: result is validated by resultCheck() and the created callback handle
// is returned via createResultValueType().
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
VkResult result = d.vkCreateDebugReportCallbackEXT(
m_instance,
reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As above, but wraps the callback in a UniqueHandle destroyed through this Instance with the
// same allocator.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
VkResult result = d.vkCreateDebugReportCallbackEXT(
m_instance,
reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper around vkDestroyDebugReportCallbackEXT.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking an Optional allocator (defaults to no allocator).
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
m_instance,
static_cast<VkDebugReportCallbackEXT>( callback ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Generic destroy() overload for DebugReportCallbackEXT handles; identical behavior to
// destroyDebugReportCallbackEXT, provided so destruction can be written uniformly.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode destroy() overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyDebugReportCallbackEXT(
m_instance,
static_cast<VkDebugReportCallbackEXT>( callback ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Injects a message into the VK_EXT_debug_report stream via vkDebugReportMessageEXT.
// This overload takes raw C strings for the layer prefix and message.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
uint64_t object,
size_t location,
int32_t messageCode,
const char * pLayerPrefix,
const char * pMessage,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDebugReportMessageEXT( m_instance,
static_cast<VkDebugReportFlagsEXT>( flags ),
static_cast<VkDebugReportObjectTypeEXT>( objectType ),
object,
location,
messageCode,
pLayerPrefix,
pMessage );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking std::string arguments; forwards their c_str() pointers, so the
// strings only need to outlive this call.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType,
uint64_t object,
size_t location,
int32_t messageCode,
const std::string & layerPrefix,
const std::string & message,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDebugReportMessageEXT( m_instance,
static_cast<VkDebugReportFlagsEXT>( flags ),
static_cast<VkDebugReportObjectTypeEXT>( objectType ),
object,
location,
messageCode,
layerPrefix.c_str(),
message.c_str() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  8093. //=== VK_EXT_debug_marker ===
// Pointer-based wrapper around vkDebugMarkerSetObjectTagEXT; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: takes the tag info by reference and validates the result via
// resultCheck(); the void-typed success value is produced by createResultValueType().
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper around vkDebugMarkerSetObjectNameEXT; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: reference parameter, result validated through resultCheck().
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Records a vkCmdDebugMarkerBeginEXT command into this command buffer (pointer overload).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking the marker info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Records a vkCmdDebugMarkerEndEXT command, closing the most recent debug marker region.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
}
// Records a vkCmdDebugMarkerInsertEXT command (pointer overload).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking the marker info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  8168. #if defined( VK_ENABLE_BETA_EXTENSIONS )
  8169. //=== VK_KHR_video_queue ===
// Pointer-based wrapper around vkGetPhysicalDeviceVideoCapabilitiesKHR; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: result is validated by resultCheck() and the capabilities struct is
// returned via createResultValueType().
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
}
// StructureChain overload: lets the caller query extension structures chained onto
// VideoCapabilitiesKHR; the driver fills the whole chain through the embedded struct's pNext.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
StructureChain<X, Y, Z...> structureChain;
// Write into the chain's own VideoCapabilitiesKHR element so the chained pNext links survive.
VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper around vkGetPhysicalDeviceVideoFormatPropertiesKHR; the caller drives
// the count/array two-call protocol themselves.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
uint32_t * pVideoFormatPropertyCount,
VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
pVideoFormatPropertyCount,
reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: performs the standard Vulkan two-call enumeration (query count, then
// fetch), retrying while the driver reports VK_INCOMPLETE (count changed between the calls),
// and returns the properties in a std::vector.
template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
uint32_t videoFormatPropertyCount;
VkResult result;
do
{
// First call with a null array pointer only queries the element count.
result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
&videoFormatPropertyCount,
reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
// Shrink in case the second call returned fewer elements than first reported.
if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
videoFormatProperties.resize( videoFormatPropertyCount );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
}
// Same enumeration as above, but constructs the result vector with a caller-supplied allocator
// (enabled via B1's value_type matching VideoFormatPropertiesKHR).
template <typename VideoFormatPropertiesKHRAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
uint32_t videoFormatPropertyCount;
VkResult result;
do
{
result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
{
videoFormatProperties.resize( videoFormatPropertyCount );
result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
&videoFormatPropertyCount,
reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
if ( videoFormatPropertyCount < videoFormatProperties.size() )
{
videoFormatProperties.resize( videoFormatPropertyCount );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper: forwards to vkCreateVideoSessionKHR unchecked and returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: result validated by resultCheck(), new session returned via
// createResultValueType().
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
VkResult result =
d.vkCreateVideoSessionKHR( m_device,
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As above, but the session is returned as a UniqueHandle destroyed through this Device with the
// same allocator.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
VkResult result =
d.vkCreateVideoSessionKHR( m_device,
reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper around vkDestroyVideoSessionKHR.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyVideoSessionKHR(
m_device,
static_cast<VkVideoSessionKHR>( videoSession ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Generic destroy() overload for VideoSessionKHR handles; identical behavior to
// destroyVideoSessionKHR, provided so destruction can be written uniformly.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode destroy() overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyVideoSessionKHR(
m_device,
static_cast<VkVideoSessionKHR>( videoSession ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-based wrapper around vkGetVideoSessionMemoryRequirementsKHR; the caller drives the
// count/array two-call protocol themselves.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
uint32_t * pMemoryRequirementsCount,
VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
static_cast<VkVideoSessionKHR>( videoSession ),
pMemoryRequirementsCount,
reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
}
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: standard Vulkan two-call enumeration (query count, then fetch),
// retrying while the driver reports VK_INCOMPLETE, returning the requirements in a std::vector.
template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
uint32_t memoryRequirementsCount;
VkResult result;
do
{
// First call with a null array pointer only queries the element count.
result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
{
memoryRequirements.resize( memoryRequirementsCount );
result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
static_cast<VkVideoSessionKHR>( videoSession ),
&memoryRequirementsCount,
reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
}
} while ( result == VK_INCOMPLETE );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
// Shrink in case the second call returned fewer elements than first reported.
if ( memoryRequirementsCount < memoryRequirements.size() )
{
memoryRequirements.resize( memoryRequirementsCount );
}
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
}
  template <typename VideoSessionMemoryRequirementsKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type>
  // Same enumeration as the overload above, but the result vector is seeded with a
  // caller-supplied allocator instance (SFINAE-constrained to a matching value_type).
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
    Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                                  VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Construct the vector with the caller's allocator before enumerating into it.
    std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
      videoSessionMemoryRequirementsKHRAllocator );
    uint32_t memoryRequirementsCount;
    VkResult result;
    do
    {
      // First call with a null data pointer: query the element count only.
      result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
      if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
      {
        // Second call: fill a vector sized from the count queried above.
        memoryRequirements.resize( memoryRequirementsCount );
        result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
                                                           static_cast<VkVideoSessionKHR>( videoSession ),
                                                           &memoryRequirementsCount,
                                                           reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry if the available count changed between the two calls
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
    if ( memoryRequirementsCount < memoryRequirements.size() )
    {
      // The second call may report fewer elements than were allocated; trim the excess.
      memoryRequirements.resize( memoryRequirementsCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
  }
  8454. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: binds device memory to a video session from a raw
  // count + array of BindVideoSessionMemoryInfoKHR structs.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                       uint32_t bindSessionMemoryInfoCount,
                                       const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
                                                               static_cast<VkVideoSessionKHR>( videoSession ),
                                                               bindSessionMemoryInfoCount,
                                                               reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: accepts the bind infos as an ArrayProxy (count and data
  // derived from it) and validates the result via resultCheck.
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
                                       ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBindVideoSessionMemoryKHR( m_device,
                                                     static_cast<VkVideoSessionKHR>( videoSession ),
                                                     bindSessionMemoryInfos.size(),
                                                     reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: creates a VideoSessionParametersKHR object, writing the new
  // handle through the caller-provided output pointer.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
                                                                     reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
                                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                     reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
  }
  8497. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: returns the created handle wrapped in a
  // ResultValueType after validating the VkResult via resultCheck.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
    Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    VkResult result = d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      // Optional<> converts to a possibly-null callbacks pointer.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );
  }
  # ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  // Smart-handle variant: like createVideoSessionParametersKHR, but wraps the new
  // handle in a UniqueHandle whose deleter remembers the allocator and dispatcher.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
    Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
    VkResult result = d.vkCreateVideoSessionParametersKHR(
      m_device,
      reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
                                    videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  8534. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: applies an update to an existing video session parameters
  // object and returns the raw Result.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                             const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
                                                                     static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                                                     reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the update info by reference and validates the
  // result via resultCheck.
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                             const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device,
                                                           static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
                                                           reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: destroys a VideoSessionParametersKHR with optional raw
  // allocation callbacks.
  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyVideoSessionParametersKHR(
      m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: allocation callbacks supplied as an Optional<> wrapper.
  VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      // Optional<> converts to a possibly-null callbacks pointer.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // Generic destroy() overload for VideoSessionParametersKHR handles; forwards to
  // vkDestroyVideoSessionParametersKHR just like destroyVideoSessionParametersKHR.
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyVideoSessionParametersKHR(
      m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Generic destroy() overload (enhanced mode): allocation callbacks supplied as an
  // Optional<> wrapper.
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyVideoSessionParametersKHR(
      m_device,
      static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
      // Optional<> converts to a possibly-null callbacks pointer.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: records a begin-video-coding command from a raw info pointer.
  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the begin info by reference.
  VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: records an end-video-coding command from a raw info pointer.
  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the end-coding info by reference.
  VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: records a video-coding control command from a raw info pointer.
  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the control info by reference.
  VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  8653. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  8654. #if defined( VK_ENABLE_BETA_EXTENSIONS )
  8655. //=== VK_KHR_video_decode_queue ===
  template <typename Dispatch>
  // C-style overload: records a video-decode command from a raw frame-info pointer.
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pFrameInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pFrameInfo ) );
  }
  # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the frame info by reference.
  VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & frameInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &frameInfo ) );
  }
  # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  8672. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  8673. //=== VK_EXT_transform_feedback ===
  template <typename Dispatch>
  // C-style overload: binds transform-feedback buffers from raw parallel arrays of
  // buffers, offsets and (optional) sizes.
  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                                         uint32_t bindingCount,
                                                                         const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                                         const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                                         const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            bindingCount,
                                            reinterpret_cast<const VkBuffer *>( pBuffers ),
                                            reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                            reinterpret_cast<const VkDeviceSize *>( pSizes ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: takes the parallel arrays as ArrayProxies and checks the
  // size invariants (offsets must match buffers; sizes may be empty or match).
  // Depending on VULKAN_HPP_NO_EXCEPTIONS, a violation either asserts or throws LogicError.
  VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                                         ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                                         ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                                         ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                            firstBinding,
                                            buffers.size(),
                                            reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                            reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: begins transform feedback with raw counter-buffer arrays.
  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                                   uint32_t counterBufferCount,
                                                                   const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                                                   const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBufferCount,
                                      reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                      reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: ArrayProxy inputs; offsets may be empty or must match
  // the counter-buffer count (assert or LogicError per VULKAN_HPP_NO_EXCEPTIONS).
  VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
                                                                   ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                      firstCounterBuffer,
                                      counterBuffers.size(),
                                      reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                      reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: ends transform feedback with raw counter-buffer arrays.
  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                                 uint32_t counterBufferCount,
                                                                 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                                                 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBufferCount,
                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: ArrayProxy inputs; offsets may be empty or must match
  // the counter-buffer count (assert or LogicError per VULKAN_HPP_NO_EXCEPTIONS).
  VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
                                                                 ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
#  else
    if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBuffers.size(),
                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // Records an indexed query begin, converting the C++ wrapper handle and flags to
  // their C equivalents.
  VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                              uint32_t query,
                                                              VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                              uint32_t index,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
  }
  template <typename Dispatch>
  // Records an indexed query end for the given pool/query/index.
  VULKAN_HPP_INLINE void
    CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
  }
  template <typename Dispatch>
  // Records an indirect draw whose vertex count is read from a counter buffer at
  // the given offset; all arguments are forwarded to vkCmdDrawIndirectByteCountEXT.
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
                                                                  uint32_t firstInstance,
                                                                  VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
                                                                  uint32_t counterOffset,
                                                                  uint32_t vertexStride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
                                     instanceCount,
                                     firstInstance,
                                     static_cast<VkBuffer>( counterBuffer ),
                                     static_cast<VkDeviceSize>( counterBufferOffset ),
                                     counterOffset,
                                     vertexStride );
  }
  8829. //=== VK_NVX_binary_import ===
  template <typename Dispatch>
  // C-style overload: creates a CuModuleNVX, writing the new handle through the
  // caller-provided output pointer.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
                                                       reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
  }
  8842. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: returns the created CuModuleNVX wrapped in a
  // ResultValueType after validating the VkResult via resultCheck.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
    Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    VkResult result =
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             // Optional<> converts to a possibly-null callbacks pointer.
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
  }
  # ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  // Smart-handle variant: like createCuModuleNVX, but wraps the new handle in a
  // UniqueHandle whose deleter remembers the allocator and dispatcher.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
    Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CuModuleNVX module;
    VkResult result =
      d.vkCreateCuModuleNVX( m_device,
                             reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuModuleNVX *>( &module ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  8878. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  // C-style overload: creates a CuFunctionNVX, writing the new handle through the
  // caller-provided output pointer.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
                                                         reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
  }
  8891. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  // Enhanced-mode overload: returns the created CuFunctionNVX wrapped in a
  // ResultValueType after validating the VkResult via resultCheck.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
    Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    VkResult result =
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               // Optional<> converts to a possibly-null callbacks pointer.
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
  }
  # ifndef VULKAN_HPP_NO_SMART_HANDLE
  template <typename Dispatch>
  // Smart-handle variant: like createCuFunctionNVX, but wraps the new handle in a
  // UniqueHandle whose deleter remembers the allocator and dispatcher.
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
    Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
    VkResult result =
      d.vkCreateCuFunctionNVX( m_device,
                               reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkCuFunctionNVX *>( &function ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  8928. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys a CuModuleNVX via vkDestroyCuModuleNVX; pAllocator may be null to use
  // the implementation's default allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX          module,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher must match the header version this wrapper was compiled against.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes an Optional<AllocationCallbacks> reference wrapper
  // instead of a raw pointer; otherwise identical to the overload above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX                         module,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                     Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for CuModuleNVX — same behavior as destroyCuModuleNVX;
  // exists so UniqueHandle/ObjectDestroy can call a uniform Device::destroy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX                 module,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroy() overload taking Optional<AllocationCallbacks>.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX                         module,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuModuleNVX( m_device,
                            static_cast<VkCuModuleNVX>( module ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys a CuFunctionNVX via vkDestroyCuFunctionNVX; pAllocator may be null for
  // default allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload taking Optional<AllocationCallbacks>.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX                       function,
                                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                       Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for CuFunctionNVX — same behavior as destroyCuFunctionNVX;
  // exists so UniqueHandle/ObjectDestroy can call a uniform Device::destroy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX               function,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode destroy() overload taking Optional<AllocationCallbacks>.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX                       function,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyCuFunctionNVX( m_device,
                              static_cast<VkCuFunctionNVX>( function ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a CUDA kernel launch into this command buffer (vkCmdCuLaunchKernelNVX).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the launch info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
                                                           Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9025. //=== VK_NVX_image_view_handle ===
  // Returns the opaque 32-bit handle of an image view (vkGetImageViewHandleNVX).
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the info struct by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
                                                            Dispatch const &                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
    return result;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Queries the device address and size of an image view (vkGetImageViewAddressNVX);
  // returns the raw VkResult as vk::Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView                       imageView,
                                                                                VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
                                                                                Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties by value; errors are routed
  // through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
    Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
    VkResult result =
      d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9065. //=== VK_AMD_draw_indirect_count ===
  // Records an indirect draw whose draw count is read from countBuffer at
  // countBufferOffset, clamped to maxDrawCount (vkCmdDrawIndirectCountAMD).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t                         maxDrawCount,
                                                              uint32_t                         stride,
                                                              Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }
  // Indexed variant of drawIndirectCountAMD (vkCmdDrawIndexedIndirectCountAMD);
  // draw count is read from countBuffer at countBufferOffset, clamped to maxDrawCount.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t                         maxDrawCount,
                                                                     uint32_t                         stride,
                                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }
  9102. //=== VK_AMD_shader_info ===
  // Raw wrapper for vkGetShaderInfoAMD: standard Vulkan size-query protocol —
  // pass pInfo == nullptr to receive the required size in *pInfoSize, then call
  // again with a buffer of that size.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                                                                          VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                                                                          VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                                                                          size_t *                                  pInfoSize,
                                                                          void *                                    pInfo,
                                                                          Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                      static_cast<VkPipeline>( pipeline ),
                                                      static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                      static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                      pInfoSize,
                                                      pInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper for vkGetShaderInfoAMD: performs the two-call enumeration
  // protocol (size query with a null buffer, then the actual fetch) and retries the
  // whole sequence while the driver reports VK_INCOMPLETE, i.e. the size changed
  // between the two calls. Returns the info blob as a byte vector.
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                              Dispatch const &                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<uint8_t, Uint8_tAllocator> info;
    size_t                                 infoSize;
    VkResult                               result;
    do
    {
      // First call: query the required size only.
      result = d.vkGetShaderInfoAMD( m_device,
                                     static_cast<VkPipeline>( pipeline ),
                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
                                     &infoSize,
                                     nullptr );
      if ( ( result == VK_SUCCESS ) && infoSize )
      {
        // Second call: fetch the data into the resized buffer.
        info.resize( infoSize );
        result = d.vkGetShaderInfoAMD( m_device,
                                       static_cast<VkPipeline>( pipeline ),
                                       static_cast<VkShaderStageFlagBits>( shaderStage ),
                                       static_cast<VkShaderInfoTypeAMD>( infoType ),
                                       &infoSize,
                                       reinterpret_cast<void *>( info.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
    VULKAN_HPP_ASSERT( infoSize <= info.size() );
    if ( infoSize < info.size() )
    {
      // Shrink in case the driver returned less data than the first query promised.
      info.resize( infoSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
  }

  // Allocator-aware variant of the overload above: identical enumeration logic, but
  // the result vector is constructed with the caller-supplied allocator.
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline            pipeline,
                              VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                              VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD   infoType,
                              Uint8_tAllocator &                        uint8_tAllocator,
                              Dispatch const &                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
    size_t                                 infoSize;
    VkResult                               result;
    do
    {
      result = d.vkGetShaderInfoAMD( m_device,
                                     static_cast<VkPipeline>( pipeline ),
                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
                                     &infoSize,
                                     nullptr );
      if ( ( result == VK_SUCCESS ) && infoSize )
      {
        info.resize( infoSize );
        result = d.vkGetShaderInfoAMD( m_device,
                                       static_cast<VkPipeline>( pipeline ),
                                       static_cast<VkShaderStageFlagBits>( shaderStage ),
                                       static_cast<VkShaderInfoTypeAMD>( infoType ),
                                       &infoSize,
                                       reinterpret_cast<void *>( info.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
    VULKAN_HPP_ASSERT( infoSize <= info.size() );
    if ( infoSize < info.size() )
    {
      info.resize( infoSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9201. //=== VK_KHR_dynamic_rendering ===
  // Begins a dynamic rendering pass on this command buffer (vkCmdBeginRenderingKHR).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
                                                           Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the rendering info by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
                                                           Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Ends the current dynamic rendering pass (vkCmdEndRenderingKHR).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderingKHR( m_commandBuffer );
  }
#if defined( VK_USE_PLATFORM_GGP )
  //=== VK_GGP_stream_descriptor_surface ===

  // Raw wrapper: creates a Google Games Platform surface from a stream descriptor
  // (vkCreateStreamDescriptorSurfaceGGP) and writes the handle to *pSurface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                  pAllocator,
                                                VULKAN_HPP_NAMESPACE::SurfaceKHR *                                 pSurface,
                                                Dispatch const &                                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
                                                                      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
                                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                      reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the SurfaceKHR by value; errors are routed
  // through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>          allocator,
                                                Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }

#    ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Same as above, but wraps the surface in a UniqueHandle that destroys it with
  // the supplied allocator when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>          allocator,
                                                      Dispatch const &                                                   d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateStreamDescriptorSurfaceGGP(
      m_instance,
      reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#    endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_GGP*/
  9278. //=== VK_NV_external_memory_capabilities ===
  // Raw wrapper for vkGetPhysicalDeviceExternalImageFormatPropertiesNV: queries
  // external-memory image format capabilities for the given format/usage combination.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                            format,
                                                        VULKAN_HPP_NAMESPACE::ImageType                         type,
                                                        VULKAN_HPP_NAMESPACE::ImageTiling                       tiling,
                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                   usage,
                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                  flags,
                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV   externalHandleType,
                                                        VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
                                                        Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
                                                            static_cast<VkFormat>( format ),
                                                            static_cast<VkImageType>( type ),
                                                            static_cast<VkImageTiling>( tiling ),
                                                            static_cast<VkImageUsageFlags>( usage ),
                                                            static_cast<VkImageCreateFlags>( flags ),
                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties by value; errors are routed
  // through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
    PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                          format,
                                                        VULKAN_HPP_NAMESPACE::ImageType                       type,
                                                        VULKAN_HPP_NAMESPACE::ImageTiling                     tiling,
                                                        VULKAN_HPP_NAMESPACE::ImageUsageFlags                 usage,
                                                        VULKAN_HPP_NAMESPACE::ImageCreateFlags                flags,
                                                        VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
                                                        Dispatch const &                                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
    VkResult                                              result =
      d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
                                                            static_cast<VkFormat>( format ),
                                                            static_cast<VkImageType>( type ),
                                                            static_cast<VkImageTiling>( tiling ),
                                                            static_cast<VkImageUsageFlags>( usage ),
                                                            static_cast<VkImageCreateFlags>( flags ),
                                                            static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
                                                            reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_external_memory_win32 ===

  // Raw wrapper for vkGetMemoryWin32HandleNV: exports a Win32 HANDLE for the given
  // device memory / external handle type and writes it to *pHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory                    memory,
                                                                                VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
                                                                                HANDLE *                                              pHandle,
                                                                                Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the HANDLE by value; errors are routed through
  // resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
    VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    HANDLE   handle;
    VkResult result =
      d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  9353. //=== VK_KHR_get_physical_device_properties2 ===
  // Raw wrapper for vkGetPhysicalDeviceFeatures2KHR: fills *pFeatures (and its pNext
  // chain, if the caller populated one).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
                                                          Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the features struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
    PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  // StructureChain overload: fills the PhysicalDeviceFeatures2 element of a caller-chosen
  // pNext chain, letting extension feature structs be queried in one call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                      structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkGetPhysicalDeviceProperties2KHR: fills *pProperties (and its
  // pNext chain, if the caller populated one).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
    PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return properties;
  }

  // StructureChain overload: fills the PhysicalDeviceProperties2 element of a
  // caller-chosen pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                        structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkGetPhysicalDeviceFormatProperties2KHR: fills *pFormatProperties
  // for the given format (including its pNext chain, if populated).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format             format,
                                                                  VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                                                  Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the format properties by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
    PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }

  // StructureChain overload: fills the FormatProperties2 element of a caller-chosen
  // pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
                                                                                                             Dispatch const &             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2KHR(
      m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkGetPhysicalDeviceImageFormatProperties2KHR: fills
  // *pImageFormatProperties for the combination described by *pImageFormatInfo and
  // returns the raw VkResult as vk::Result (e.g. eErrorFormatNotSupported).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                                  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
                                                  Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                                reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
                                                                                reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties by value; errors are routed
  // through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }

  // StructureChain overload: fills the ImageFormatProperties2 element of a
  // caller-chosen pNext chain.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                      reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                      reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9478. template <typename Dispatch>
  9479. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
  9480. VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
  9481. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9482. {
  9483. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9484. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
  9485. m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  9486. }
  9487. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9488. template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  9489. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
  9490. PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  9491. {
  9492. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9493. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
  9494. uint32_t queueFamilyPropertyCount;
  9495. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  9496. queueFamilyProperties.resize( queueFamilyPropertyCount );
  9497. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
  9498. m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  9499. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  9500. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  9501. {
  9502. queueFamilyProperties.resize( queueFamilyPropertyCount );
  9503. }
  9504. return queueFamilyProperties;
  9505. }
  9506. template <typename QueueFamilyProperties2Allocator,
  9507. typename Dispatch,
  9508. typename B1,
  9509. typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
  9510. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
  9511. PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  9512. {
  9513. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9514. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
  9515. uint32_t queueFamilyPropertyCount;
  9516. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  9517. queueFamilyProperties.resize( queueFamilyPropertyCount );
  9518. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
  9519. m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  9520. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  9521. if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  9522. {
  9523. queueFamilyProperties.resize( queueFamilyPropertyCount );
  9524. }
  9525. return queueFamilyProperties;
  9526. }
// Enhanced-mode structure-chain variant: returns one StructureChain per queue family, so that
// extension structs hanging off QueueFamilyProperties2::pNext are filled for every family.
template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains;
  // Scratch vector handed to the C API; base structs are copied back into the chains afterwards.
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  // First call: query the number of queue families.
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  structureChains.resize( queueFamilyPropertyCount );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  // Wire each scratch element's pNext to the corresponding chain's extension structs, so the
  // implementation writes extension data directly into the chains during the fill call below.
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  }
  // Second call: fill the base structs (and, via pNext, the chained extension structs).
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  // Only the returned chains are shrunk; the scratch vector may keep extra elements, but the
  // copy-back loop below is bounded by queueFamilyPropertyCount anyway.
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    structureChains.resize( queueFamilyPropertyCount );
  }
  // Copy the filled base structs back into their chains.
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  }
  return structureChains;
}
// Same as above, but the returned vector of chains is constructed with a caller-provided allocator.
template <typename StructureChain,
          typename StructureChainAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
  // Scratch vector handed to the C API; see the overload above for the full protocol.
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  structureChains.resize( queueFamilyPropertyCount );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  // Link each scratch element into its chain before the fill call.
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  }
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    structureChains.resize( queueFamilyPropertyCount );
  }
  // Copy the filled base structs back into their chains.
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  }
  return structureChains;
}
  9587. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9588. template <typename Dispatch>
  9589. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
  9590. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9591. {
  9592. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9593. d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  9594. }
  9595. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9596. template <typename Dispatch>
  9597. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  9598. PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9599. {
  9600. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9601. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  9602. d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  9603. return memoryProperties;
  9604. }
  9605. template <typename X, typename Y, typename... Z, typename Dispatch>
  9606. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9607. {
  9608. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9609. StructureChain<X, Y, Z...> structureChain;
  9610. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
  9611. structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
  9612. d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  9613. return structureChain;
  9614. }
  9615. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9616. template <typename Dispatch>
  9617. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
  9618. uint32_t * pPropertyCount,
  9619. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
  9620. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9621. {
  9622. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9623. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
  9624. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
  9625. pPropertyCount,
  9626. reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  9627. }
  9628. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9629. template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  9630. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  9631. PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
  9632. Dispatch const & d ) const
  9633. {
  9634. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9635. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
  9636. uint32_t propertyCount;
  9637. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
  9638. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  9639. properties.resize( propertyCount );
  9640. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
  9641. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  9642. &propertyCount,
  9643. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  9644. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9645. if ( propertyCount < properties.size() )
  9646. {
  9647. properties.resize( propertyCount );
  9648. }
  9649. return properties;
  9650. }
  9651. template <typename SparseImageFormatProperties2Allocator,
  9652. typename Dispatch,
  9653. typename B1,
  9654. typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
  9655. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  9656. PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
  9657. SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
  9658. Dispatch const & d ) const
  9659. {
  9660. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9661. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
  9662. uint32_t propertyCount;
  9663. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
  9664. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  9665. properties.resize( propertyCount );
  9666. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
  9667. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  9668. &propertyCount,
  9669. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  9670. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  9671. if ( propertyCount < properties.size() )
  9672. {
  9673. properties.resize( propertyCount );
  9674. }
  9675. return properties;
  9676. }
  9677. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9678. //=== VK_KHR_device_group ===
  9679. template <typename Dispatch>
  9680. VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
  9681. uint32_t localDeviceIndex,
  9682. uint32_t remoteDeviceIndex,
  9683. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
  9684. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9685. {
  9686. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9687. d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
  9688. m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  9689. }
  9690. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9691. template <typename Dispatch>
  9692. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR(
  9693. uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9694. {
  9695. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9696. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  9697. d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
  9698. m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  9699. return peerMemoryFeatures;
  9700. }
  9701. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9702. template <typename Dispatch>
  9703. VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9704. {
  9705. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9706. d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  9707. }
  9708. template <typename Dispatch>
  9709. VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX,
  9710. uint32_t baseGroupY,
  9711. uint32_t baseGroupZ,
  9712. uint32_t groupCountX,
  9713. uint32_t groupCountY,
  9714. uint32_t groupCountZ,
  9715. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9716. {
  9717. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9718. d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  9719. }
  9720. #if defined( VK_USE_PLATFORM_VI_NN )
  9721. //=== VK_NN_vi_surface ===
  9722. template <typename Dispatch>
  9723. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
  9724. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  9725. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  9726. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9727. {
  9728. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9729. return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
  9730. reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
  9731. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  9732. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  9733. }
  9734. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9735. template <typename Dispatch>
  9736. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  9737. Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
  9738. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  9739. Dispatch const & d ) const
  9740. {
  9741. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9742. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  9743. VkResult result =
  9744. d.vkCreateViSurfaceNN( m_instance,
  9745. reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
  9746. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  9747. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  9748. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
  9749. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  9750. }
  9751. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  9752. template <typename Dispatch>
  9753. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  9754. Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo,
  9755. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  9756. Dispatch const & d ) const
  9757. {
  9758. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9759. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  9760. VkResult result =
  9761. d.vkCreateViSurfaceNN( m_instance,
  9762. reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
  9763. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  9764. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  9765. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );
  9766. return createResultValueType(
  9767. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  9768. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  9769. }
  9770. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  9771. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9772. #endif /*VK_USE_PLATFORM_VI_NN*/
  9773. //=== VK_KHR_maintenance1 ===
  9774. template <typename Dispatch>
  9775. VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  9776. VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
  9777. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9778. {
  9779. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9780. d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  9781. }
  9782. //=== VK_KHR_device_group_creation ===
  9783. template <typename Dispatch>
  9784. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  9785. Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount,
  9786. VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
  9787. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9788. {
  9789. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9790. return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
  9791. m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  9792. }
  9793. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: enumerates all physical-device groups into a vector, retrying the
// count/fill sequence while the implementation reports VK_INCOMPLETE (the count can change
// between the two calls).
template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
  Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
  uint32_t physicalDeviceGroupCount;
  VkResult result;
  do
  {
    // Query the count, then fill; loop again if the fill was truncated (VK_INCOMPLETE).
    result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
      result = d.vkEnumeratePhysicalDeviceGroupsKHR(
        m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  // Shrink if the final fill returned fewer groups than were allocated for.
  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
  {
    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
// Same as above, but the result vector is constructed with a caller-provided allocator.
template <typename PhysicalDeviceGroupPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
  Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
    physicalDeviceGroupPropertiesAllocator );
  uint32_t physicalDeviceGroupCount;
  VkResult result;
  do
  {
    // Count/fill loop, repeated while the fill is truncated (VK_INCOMPLETE).
    result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
      result = d.vkEnumeratePhysicalDeviceGroupsKHR(
        m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
  VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
  {
    physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
}
  9852. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9853. //=== VK_KHR_external_memory_capabilities ===
  9854. template <typename Dispatch>
  9855. VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
  9856. VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
  9857. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9858. {
  9859. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9860. d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
  9861. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
  9862. reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  9863. }
  9864. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9865. template <typename Dispatch>
  9866. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
  9867. PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
  9868. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9869. {
  9870. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9871. VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
  9872. d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
  9873. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
  9874. reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
  9875. return externalBufferProperties;
  9876. }
  9877. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9878. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  9879. //=== VK_KHR_external_memory_win32 ===
  9880. template <typename Dispatch>
  9881. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
  9882. HANDLE * pHandle,
  9883. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9884. {
  9885. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9886. return static_cast<Result>(
  9887. d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  9888. }
  9889. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9890. template <typename Dispatch>
  9891. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
  9892. Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  9893. {
  9894. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9895. HANDLE handle;
  9896. VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  9897. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
  9898. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  9899. }
  9900. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9901. template <typename Dispatch>
  9902. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  9903. Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
  9904. HANDLE handle,
  9905. VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
  9906. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9907. {
  9908. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9909. return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
  9910. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  9911. handle,
  9912. reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  9913. }
  9914. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9915. template <typename Dispatch>
  9916. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
  9917. Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
  9918. {
  9919. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9920. VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
  9921. VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
  9922. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  9923. handle,
  9924. reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
  9925. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
  9926. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
  9927. }
  9928. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9929. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  9930. //=== VK_KHR_external_memory_fd ===
  9931. template <typename Dispatch>
  9932. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
  9933. int * pFd,
  9934. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9935. {
  9936. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9937. return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  9938. }
  9939. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9940. template <typename Dispatch>
  9941. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
  9942. Dispatch const & d ) const
  9943. {
  9944. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9945. int fd;
  9946. VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
  9947. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  9948. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  9949. }
  9950. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9951. template <typename Dispatch>
  9952. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
  9953. int fd,
  9954. VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties,
  9955. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9956. {
  9957. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9958. return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
  9959. m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  9960. }
  9961. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9962. template <typename Dispatch>
  9963. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
  9964. Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
  9965. {
  9966. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9967. VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
  9968. VkResult result = d.vkGetMemoryFdPropertiesKHR(
  9969. m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
  9970. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
  9971. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
  9972. }
  9973. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  9974. //=== VK_KHR_external_semaphore_capabilities ===
  9975. template <typename Dispatch>
  9976. VULKAN_HPP_INLINE void
  9977. PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
  9978. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
  9979. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9980. {
  9981. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9982. d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
  9983. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
  9984. reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  9985. }
  9986. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9987. template <typename Dispatch>
  9988. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
  9989. PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
  9990. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9991. {
  9992. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9993. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
  9994. d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
  9995. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
  9996. reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
  9997. return externalSemaphoreProperties;
  9998. }
  9999. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10000. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  10001. //=== VK_KHR_external_semaphore_win32 ===
  10002. template <typename Dispatch>
  10003. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
  10004. const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10005. {
  10006. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10007. return static_cast<Result>(
  10008. d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
  10009. }
  10010. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10011. template <typename Dispatch>
  10012. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  10013. Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
  10014. Dispatch const & d ) const
  10015. {
  10016. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10017. VkResult result =
  10018. d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
  10019. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
  10020. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10021. }
  10022. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10023. template <typename Dispatch>
  10024. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
  10025. const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10026. {
  10027. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10028. return static_cast<Result>(
  10029. d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  10030. }
  10031. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10032. template <typename Dispatch>
  10033. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
  10034. Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  10035. {
  10036. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10037. HANDLE handle;
  10038. VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  10039. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
  10040. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  10041. }
  10042. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10043. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  10044. //=== VK_KHR_external_semaphore_fd ===
  10045. template <typename Dispatch>
  10046. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
  10047. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10048. {
  10049. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10050. return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  10051. }
  10052. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10053. template <typename Dispatch>
  10054. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  10055. Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  10056. {
  10057. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10058. VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
  10059. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
  10060. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10061. }
  10062. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10063. template <typename Dispatch>
  10064. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
  10065. int * pFd,
  10066. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10067. {
  10068. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10069. return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  10070. }
  10071. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10072. template <typename Dispatch>
  10073. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type
  10074. Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  10075. {
  10076. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10077. int fd;
  10078. VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
  10079. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  10080. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  10081. }
  10082. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10083. //=== VK_KHR_push_descriptor ===
  10084. template <typename Dispatch>
  10085. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  10086. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  10087. uint32_t set,
  10088. uint32_t descriptorWriteCount,
  10089. const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
  10090. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10091. {
  10092. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10093. d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
  10094. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  10095. static_cast<VkPipelineLayout>( layout ),
  10096. set,
  10097. descriptorWriteCount,
  10098. reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  10099. }
  10100. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10101. template <typename Dispatch>
  10102. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  10103. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  10104. uint32_t set,
  10105. ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
  10106. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10107. {
  10108. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10109. d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
  10110. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  10111. static_cast<VkPipelineLayout>( layout ),
  10112. set,
  10113. descriptorWrites.size(),
  10114. reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  10115. }
  10116. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10117. template <typename Dispatch>
  10118. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10119. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  10120. uint32_t set,
  10121. const void * pData,
  10122. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10123. {
  10124. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10125. d.vkCmdPushDescriptorSetWithTemplateKHR(
  10126. m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  10127. }
  10128. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10129. template <typename DataType, typename Dispatch>
  10130. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10131. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  10132. uint32_t set,
  10133. DataType const & data,
  10134. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10135. {
  10136. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10137. d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
  10138. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  10139. static_cast<VkPipelineLayout>( layout ),
  10140. set,
  10141. reinterpret_cast<const void *>( &data ) );
  10142. }
  10143. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10144. //=== VK_EXT_conditional_rendering ===
  10145. template <typename Dispatch>
  10146. VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
  10147. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10148. {
  10149. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10150. d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  10151. }
  10152. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10153. template <typename Dispatch>
  10154. VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
  10155. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10156. {
  10157. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10158. d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  10159. }
  10160. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10161. template <typename Dispatch>
  10162. VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10163. {
  10164. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10165. d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  10166. }
  10167. //=== VK_KHR_descriptor_update_template ===
  10168. template <typename Dispatch>
  10169. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10170. Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
  10171. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10172. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
  10173. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10174. {
  10175. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10176. return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
  10177. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
  10178. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10179. reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  10180. }
  10181. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10182. template <typename Dispatch>
  10183. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
  10184. Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
  10185. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10186. Dispatch const & d ) const
  10187. {
  10188. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10189. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
  10190. VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
  10191. m_device,
  10192. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
  10193. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10194. reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
  10195. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
  10196. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
  10197. }
  10198. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  10199. template <typename Dispatch>
  10200. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
  10201. Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
  10202. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10203. Dispatch const & d ) const
  10204. {
  10205. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10206. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
  10207. VkResult result = d.vkCreateDescriptorUpdateTemplateKHR(
  10208. m_device,
  10209. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
  10210. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10211. reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
  10212. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
  10213. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10214. UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
  10215. descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  10216. }
  10217. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  10218. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10219. template <typename Dispatch>
  10220. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10221. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10222. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10223. {
  10224. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10225. d.vkDestroyDescriptorUpdateTemplateKHR(
  10226. m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  10227. }
  10228. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10229. template <typename Dispatch>
  10230. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10231. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10232. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10233. {
  10234. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10235. d.vkDestroyDescriptorUpdateTemplateKHR(
  10236. m_device,
  10237. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  10238. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  10239. }
  10240. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10241. template <typename Dispatch>
  10242. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  10243. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10244. const void * pData,
  10245. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10246. {
  10247. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10248. d.vkUpdateDescriptorSetWithTemplateKHR(
  10249. m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  10250. }
  10251. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10252. template <typename DataType, typename Dispatch>
  10253. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  10254. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10255. DataType const & data,
  10256. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10257. {
  10258. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10259. d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
  10260. static_cast<VkDescriptorSet>( descriptorSet ),
  10261. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  10262. reinterpret_cast<const void *>( &data ) );
  10263. }
  10264. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10265. //=== VK_NV_clip_space_w_scaling ===
  10266. template <typename Dispatch>
  10267. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
  10268. uint32_t viewportCount,
  10269. const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
  10270. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10271. {
  10272. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10273. d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
  10274. }
  10275. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10276. template <typename Dispatch>
  10277. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
  10278. ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
  10279. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10280. {
  10281. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10282. d.vkCmdSetViewportWScalingNV(
  10283. m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  10284. }
  10285. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10286. //=== VK_EXT_direct_mode_display ===
  10287. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10288. template <typename Dispatch>
  10289. VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10290. {
  10291. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10292. return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  10293. }
  10294. #else
  10295. template <typename Dispatch>
  10296. VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10297. {
  10298. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10299. d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
  10300. }
  10301. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10302. #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  10303. //=== VK_EXT_acquire_xlib_display ===
  10304. template <typename Dispatch>
  10305. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy,
  10306. VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10307. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10308. {
  10309. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10310. return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
  10311. }
  10312. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10313. template <typename Dispatch>
  10314. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  10315. PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  10316. {
  10317. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10318. VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
  10319. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
  10320. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10321. }
  10322. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10323. template <typename Dispatch>
  10324. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
  10325. RROutput rrOutput,
  10326. VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
  10327. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10328. {
  10329. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10330. return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  10331. }
  10332. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10333. template <typename Dispatch>
  10334. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
  10335. PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  10336. {
  10337. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10338. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  10339. VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
  10340. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
  10341. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  10342. }
  10343. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  10344. template <typename Dispatch>
  10345. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
  10346. PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  10347. {
  10348. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10349. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  10350. VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
  10351. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
  10352. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10353. UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  10354. }
  10355. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  10356. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10357. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  10358. //=== VK_EXT_display_surface_counter ===
  10359. template <typename Dispatch>
  10360. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10361. PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  10362. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
  10363. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10364. {
  10365. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10366. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
  10367. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
  10368. }
  10369. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10370. template <typename Dispatch>
  10371. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
  10372. PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  10373. {
  10374. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10375. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
  10376. VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
  10377. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
  10378. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
  10379. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  10380. }
  10381. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10382. //=== VK_EXT_display_control ===
  10383. template <typename Dispatch>
  10384. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10385. const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
  10386. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10387. {
  10388. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10389. return static_cast<Result>(
  10390. d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  10391. }
  10392. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10393. template <typename Dispatch>
  10394. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10395. const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
  10396. Dispatch const & d ) const
  10397. {
  10398. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10399. VkResult result =
  10400. d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
  10401. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  10402. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10403. }
  10404. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10405. template <typename Dispatch>
  10406. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
  10407. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10408. VULKAN_HPP_NAMESPACE::Fence * pFence,
  10409. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10410. {
  10411. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10412. return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
  10413. reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
  10414. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10415. reinterpret_cast<VkFence *>( pFence ) ) );
  10416. }
  10417. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10418. template <typename Dispatch>
  10419. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
  10420. Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
  10421. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10422. Dispatch const & d ) const
  10423. {
  10424. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10425. VULKAN_HPP_NAMESPACE::Fence fence;
  10426. VkResult result = d.vkRegisterDeviceEventEXT(
  10427. m_device,
  10428. reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
  10429. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10430. reinterpret_cast<VkFence *>( &fence ) );
  10431. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
  10432. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  10433. }
  10434. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Same as registerEventEXT, but wraps the created Fence in a UniqueHandle whose deleter
// (ObjectDestroy) destroys it with this Device and the same allocator when it goes out of scope.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
  Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Fence fence;  // raw handle, handed to the UniqueHandle below
  VkResult result = d.vkRegisterDeviceEventEXT(
    m_device,
    reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkFence *>( &fence ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  10452. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  10453. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10454. template <typename Dispatch>
  10455. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10456. const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
  10457. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10458. VULKAN_HPP_NAMESPACE::Fence * pFence,
  10459. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10460. {
  10461. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10462. return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
  10463. static_cast<VkDisplayKHR>( display ),
  10464. reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
  10465. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10466. reinterpret_cast<VkFence *>( pFence ) ) );
  10467. }
  10468. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkRegisterDisplayEventEXT: registers an event on the given
// display and returns the Fence signalled when it fires; failures go through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
  Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                   const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Fence fence;  // out-parameter filled in by the implementation
  VkResult result = d.vkRegisterDisplayEventEXT(
    m_device,
    static_cast<VkDisplayKHR>( display ),
    reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkFence *>( &fence ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
}
  10487. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// UniqueHandle variant of registerDisplayEventEXT: the returned Fence is owned by the
// UniqueHandle and destroyed via ObjectDestroy with this Device and the same allocator.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
  Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                         const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Fence fence;  // raw handle, handed to the UniqueHandle below
  VkResult result = d.vkRegisterDisplayEventEXT(
    m_device,
    static_cast<VkDisplayKHR>( display ),
    reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkFence *>( &fence ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  10507. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  10508. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10509. template <typename Dispatch>
  10510. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10511. VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
  10512. uint64_t * pCounterValue,
  10513. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10514. {
  10515. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10516. return static_cast<Result>(
  10517. d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
  10518. }
  10519. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkGetSwapchainCounterEXT: returns the queried counter value
// directly; errors are routed through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT(
  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  uint64_t counterValue;  // out-parameter filled in by the implementation
  VkResult result =
    d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
}
  10531. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10532. //=== VK_GOOGLE_display_timing ===
  10533. template <typename Dispatch>
  10534. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10535. Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10536. VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
  10537. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10538. {
  10539. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10540. return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
  10541. m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
  10542. }
  10543. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkGetRefreshCycleDurationGOOGLE: returns the refresh-cycle
// timing properties of the swapchain's display; errors go through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
  Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;  // out-parameter
  VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
    m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
}
  10555. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10556. template <typename Dispatch>
  10557. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10558. Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10559. uint32_t * pPresentationTimingCount,
  10560. VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
  10561. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10562. {
  10563. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10564. return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
  10565. static_cast<VkSwapchainKHR>( swapchain ),
  10566. pPresentationTimingCount,
  10567. reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
  10568. }
  10569. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkGetPastPresentationTimingGOOGLE using the standard Vulkan
// two-call enumeration idiom: first query the count, then fetch that many entries,
// retrying while the implementation reports VK_INCOMPLETE (count may change in between).
template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
  Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
  uint32_t presentationTimingCount;
  VkResult result;
  do
  {
    // first call: null data pointer -> only the count is written
    result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
    if ( ( result == VK_SUCCESS ) && presentationTimingCount )
    {
      // second call: fetch presentationTimingCount entries into the resized vector
      presentationTimings.resize( presentationTimingCount );
      result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                    static_cast<VkSwapchainKHR>( swapchain ),
                                                    &presentationTimingCount,
                                                    reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if more entries appeared between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
  // shrink if the implementation returned fewer entries than it originally advertised
  if ( presentationTimingCount < presentationTimings.size() )
  {
    presentationTimings.resize( presentationTimingCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
}
// Allocator-aware overload of the enhanced getPastPresentationTimingGOOGLE: identical
// two-call enumeration loop, but the result vector is constructed with the caller-supplied
// allocator. B1 is constrained (via enable_if) to be a vector type of PastPresentationTimingGOOGLE.
template <typename PastPresentationTimingGOOGLEAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
  Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                           PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
                                           Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
    pastPresentationTimingGOOGLEAllocator );
  uint32_t presentationTimingCount;
  VkResult result;
  do
  {
    // first call: null data pointer -> only the count is written
    result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
    if ( ( result == VK_SUCCESS ) && presentationTimingCount )
    {
      // second call: fetch presentationTimingCount entries into the resized vector
      presentationTimings.resize( presentationTimingCount );
      result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                    static_cast<VkSwapchainKHR>( swapchain ),
                                                    &presentationTimingCount,
                                                    reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if more entries appeared between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
  VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
  // shrink if the implementation returned fewer entries than it originally advertised
  if ( presentationTimingCount < presentationTimings.size() )
  {
    presentationTimings.resize( presentationTimingCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
}
  10634. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10635. //=== VK_EXT_discard_rectangles ===
  10636. template <typename Dispatch>
  10637. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
  10638. uint32_t discardRectangleCount,
  10639. const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
  10640. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10641. {
  10642. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10643. d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  10644. }
  10645. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10646. template <typename Dispatch>
  10647. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
  10648. ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
  10649. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10650. {
  10651. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10652. d.vkCmdSetDiscardRectangleEXT(
  10653. m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  10654. }
  10655. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10656. //=== VK_EXT_hdr_metadata ===
  10657. template <typename Dispatch>
  10658. VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount,
  10659. const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
  10660. const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
  10661. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10662. {
  10663. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10664. d.vkSetHdrMetadataEXT(
  10665. m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  10666. }
  10667. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload of setHdrMetadataEXT: the two proxies must be the same length
// (one metadata entry per swapchain). The mismatch check is an assert when exceptions
// are disabled, a LogicError otherwise.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains,
                                                  ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
  VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
#  else
  if ( swapchains.size() != metadata.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
  }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  d.vkSetHdrMetadataEXT( m_device,
                         swapchains.size(),
                         reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
                         reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
}
  10687. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10688. //=== VK_KHR_create_renderpass2 ===
  10689. template <typename Dispatch>
  10690. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
  10691. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10692. VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
  10693. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10694. {
  10695. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10696. return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
  10697. reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
  10698. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10699. reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  10700. }
  10701. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkCreateRenderPass2KHR: returns the created RenderPass;
// failures are routed through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
  Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::RenderPass renderPass;  // out-parameter filled in by the implementation
  VkResult result =
    d.vkCreateRenderPass2KHR( m_device,
                              reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkRenderPass *>( &renderPass ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
}
  10718. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// UniqueHandle variant of createRenderPass2KHR: the returned RenderPass is destroyed by
// ObjectDestroy (with this Device and the same allocator) when the handle goes out of scope.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
  Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::RenderPass renderPass;  // raw handle, handed to the UniqueHandle below
  VkResult result =
    d.vkCreateRenderPass2KHR( m_device,
                              reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkRenderPass *>( &renderPass ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  10737. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  10738. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10739. template <typename Dispatch>
  10740. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
  10741. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
  10742. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10743. {
  10744. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10745. d.vkCmdBeginRenderPass2KHR(
  10746. m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  10747. }
  10748. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10749. template <typename Dispatch>
  10750. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
  10751. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  10752. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10753. {
  10754. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10755. d.vkCmdBeginRenderPass2KHR(
  10756. m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  10757. }
  10758. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10759. template <typename Dispatch>
  10760. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
  10761. const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
  10762. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10763. {
  10764. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10765. d.vkCmdNextSubpass2KHR(
  10766. m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  10767. }
  10768. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10769. template <typename Dispatch>
  10770. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  10771. const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
  10772. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10773. {
  10774. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10775. d.vkCmdNextSubpass2KHR(
  10776. m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  10777. }
  10778. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10779. template <typename Dispatch>
  10780. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
  10781. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10782. {
  10783. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10784. d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  10785. }
  10786. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10787. template <typename Dispatch>
  10788. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
  10789. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10790. {
  10791. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10792. d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  10793. }
  10794. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10795. //=== VK_KHR_shared_presentable_image ===
  10796. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10797. template <typename Dispatch>
  10798. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10799. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10800. {
  10801. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10802. return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  10803. }
  10804. #else
// Enhanced-mode getSwapchainStatusKHR: the status itself is the result, so both eSuccess
// and eSuboptimalKHR are treated as success codes by resultCheck and returned to the caller.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                                   Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
  10816. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10817. //=== VK_KHR_external_fence_capabilities ===
  10818. template <typename Dispatch>
  10819. VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
  10820. VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
  10821. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10822. {
  10823. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10824. d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
  10825. reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
  10826. reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  10827. }
  10828. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper: returns the ExternalFenceProperties by value. The underlying
// entry point cannot fail, so there is no result handling.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
  PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;  // out-parameter
  d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
                                                   reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
                                                   reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
  return externalFenceProperties;
}
  10841. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10842. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  10843. //=== VK_KHR_external_fence_win32 ===
  10844. template <typename Dispatch>
  10845. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
  10846. const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10847. {
  10848. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10849. return static_cast<Result>(
  10850. d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  10851. }
  10852. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkImportFenceWin32HandleKHR: void-typed result; failure is
// reported through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  10862. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10863. template <typename Dispatch>
  10864. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
  10865. HANDLE * pHandle,
  10866. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10867. {
  10868. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10869. return static_cast<Result>(
  10870. d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  10871. }
  10872. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkGetFenceWin32HandleKHR: returns the exported Win32 HANDLE;
// failures go through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
  Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  HANDLE handle;  // out-parameter filled in by the implementation
  VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
}
  10883. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10884. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  10885. //=== VK_KHR_external_fence_fd ===
  10886. template <typename Dispatch>
  10887. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
  10888. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10889. {
  10890. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10891. return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  10892. }
  10893. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkImportFenceFdKHR: void-typed result; failure is reported
// through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  10903. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10904. template <typename Dispatch>
  10905. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
  10906. int * pFd,
  10907. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10908. {
  10909. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10910. return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  10911. }
  10912. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode wrapper for vkGetFenceFdKHR: returns the exported file descriptor;
// failures go through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
                                                                                                  Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  int fd;  // out-parameter filled in by the implementation
  VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
}
  10923. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10924. //=== VK_KHR_performance_query ===
  10925. template <typename Dispatch>
  10926. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10927. PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
  10928. uint32_t * pCounterCount,
  10929. VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters,
  10930. VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
  10931. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10932. {
  10933. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10934. return static_cast<Result>(
  10935. d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
  10936. queueFamilyIndex,
  10937. pCounterCount,
  10938. reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
  10939. reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  10940. }
  10941. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                     std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
{
  // Enhanced-mode overload: enumerates all performance counters of the given queue family together
  // with their descriptions, returned as a pair of equally-sized vectors, using the standard
  // Vulkan two-call count/data protocol.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
            std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
    data;
  // References into the pair, so the loop below can resize/fill the members in place.
  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
  uint32_t counterCount;
  VkResult result;
  do
  {
    // First call: query the element count only (both data pointers null).
    result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
    if ( ( result == VK_SUCCESS ) && counterCount )
    {
      // Second call: fetch the data; both vectors share the same count.
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice,
        queueFamilyIndex,
        &counterCount,
        reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
        reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
  // Trim both vectors if the final call reported fewer elements than were allocated.
  if ( counterCount < counters.size() )
  {
    counters.resize( counterCount );
    counterDescriptions.resize( counterCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
template <typename PerformanceCounterKHRAllocator,
          typename PerformanceCounterDescriptionKHRAllocator,
          typename Dispatch,
          typename B1,
          typename B2,
          typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
                                    std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
                                  int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                     std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
  PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex,
                                                                   PerformanceCounterKHRAllocator & performanceCounterKHRAllocator,
                                                                   PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
                                                                   Dispatch const & d ) const
{
  // Allocator-taking variant of the enhanced overload above; identical enumeration logic, but the
  // two result vectors are constructed from caller-supplied allocators (SFINAE-gated so the
  // allocators' value_types match the element types).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // piecewise_construct: build each vector of the pair directly from its allocator.
  std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
            std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
    data(
      std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
  std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
  uint32_t counterCount;
  VkResult result;
  do
  {
    // First call: query the element count only.
    result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
    if ( ( result == VK_SUCCESS ) && counterCount )
    {
      // Second call: fetch the data into both vectors.
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
        m_physicalDevice,
        queueFamilyIndex,
        &counterCount,
        reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
        reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  VULKAN_HPP_ASSERT( counterCount <= counters.size() );
  // Trim if the implementation returned fewer elements than were allocated.
  if ( counterCount < counters.size() )
  {
    counters.resize( counterCount );
    counterDescriptions.resize( counterCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
  11031. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11032. template <typename Dispatch>
  11033. VULKAN_HPP_INLINE void
  11034. PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
  11035. uint32_t * pNumPasses,
  11036. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11037. {
  11038. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11039. d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
  11040. m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
  11041. }
  11042. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11043. template <typename Dispatch>
  11044. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
  11045. const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11046. {
  11047. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11048. uint32_t numPasses;
  11049. d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
  11050. m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
  11051. return numPasses;
  11052. }
  11053. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11054. template <typename Dispatch>
  11055. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
  11056. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11057. {
  11058. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11059. return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  11060. }
  11061. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11062. template <typename Dispatch>
  11063. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  11064. Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  11065. {
  11066. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11067. VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
  11068. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
  11069. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  11070. }
  11071. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Releases the profiling lock previously taken via acquireProfilingLockKHR.
// Pure forwarder: the underlying call takes only the device handle and yields no status to translate.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkReleaseProfilingLockKHR( m_device );
}
  11078. //=== VK_KHR_get_surface_capabilities2 ===
  11079. template <typename Dispatch>
  11080. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  11081. PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
  11082. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities,
  11083. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11084. {
  11085. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11086. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
  11087. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
  11088. reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  11089. }
  11090. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11091. template <typename Dispatch>
  11092. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
  11093. PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  11094. {
  11095. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11096. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
  11097. VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
  11098. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  11099. reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
  11100. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  11101. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  11102. }
  11103. template <typename X, typename Y, typename... Z, typename Dispatch>
  11104. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
  11105. PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  11106. {
  11107. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11108. StructureChain<X, Y, Z...> structureChain;
  11109. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
  11110. VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
  11111. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  11112. reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
  11113. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  11114. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  11115. }
  11116. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11117. template <typename Dispatch>
  11118. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
  11119. uint32_t * pSurfaceFormatCount,
  11120. VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats,
  11121. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11122. {
  11123. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11124. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
  11125. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
  11126. pSurfaceFormatCount,
  11127. reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  11128. }
  11129. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
  // Enhanced-mode overload: returns all surface formats in a vector, using the standard Vulkan
  // two-call count/data protocol.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // First call: query the count only (data pointer null).
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      // Second call: fetch the data.
      surfaceFormats.resize( surfaceFormatCount );
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
template <typename SurfaceFormat2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                         SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
                                         Dispatch const & d ) const
{
  // Allocator-taking variant of the enhanced overload above; same two-call enumeration, but the
  // result vector is constructed from a caller-supplied allocator (SFINAE-gated on its value_type).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      // Second call: fetch the data.
      surfaceFormats.resize( surfaceFormatCount );
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
  // Structure-chain overload: each element of the result is a full StructureChain whose head is a
  // SurfaceFormat2KHR; extension structs hang off the head's pNext. The C API needs a contiguous
  // array of SurfaceFormat2KHR, so a temporary vector is used and copied back into the chains.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains;
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;  // contiguous staging array for the C call
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      structureChains.resize( surfaceFormatCount );
      surfaceFormats.resize( surfaceFormatCount );
      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
      {
        // Wire each staging element's pNext into its chain, so the driver also fills the extension structs.
        surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
      }
      // Second call: fetch the data into the staging array (and, via pNext, into the chains).
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Trim the returned chains if the final call reported fewer elements than were allocated.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    structureChains.resize( surfaceFormatCount );
  }
  // Copy the filled head structs back into their chains.
  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
}
template <typename StructureChain,
          typename StructureChainAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                         StructureChainAllocator & structureChainAllocator,
                                         Dispatch const & d ) const
{
  // Allocator-taking variant of the structure-chain overload above; identical logic, but the chain
  // vector is constructed from a caller-supplied allocator (SFINAE-gated on its value_type).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;  // contiguous staging array for the C call
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      structureChains.resize( surfaceFormatCount );
      surfaceFormats.resize( surfaceFormatCount );
      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
      {
        // Wire each staging element's pNext into its chain, so the driver also fills the extension structs.
        surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
      }
      // Second call: fetch the data into the staging array (and, via pNext, into the chains).
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Trim the returned chains if the final call reported fewer elements than were allocated.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    structureChains.resize( surfaceFormatCount );
  }
  // Copy the filled head structs back into their chains.
  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
}
  11277. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11278. //=== VK_KHR_get_display_properties2 ===
  11279. template <typename Dispatch>
  11280. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
  11281. VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
  11282. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11283. {
  11284. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11285. return static_cast<Result>(
  11286. d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  11287. }
  11288. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
{
  // Enhanced-mode overload: returns all display properties in a vector, using the standard
  // Vulkan two-call count/data protocol.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result =
        d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
{
  // Allocator-taking variant of the enhanced overload above; same two-call enumeration, but the
  // result vector is constructed from a caller-supplied allocator (SFINAE-gated on its value_type).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result =
        d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11346. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11347. template <typename Dispatch>
  11348. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
  11349. VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
  11350. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11351. {
  11352. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11353. return static_cast<Result>(
  11354. d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  11355. }
  11356. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
{
  // Enhanced-mode overload: returns all display-plane properties in a vector, using the standard
  // Vulkan two-call count/data protocol.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayPlaneProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
{
  // Allocator-taking variant of the enhanced overload above; same two-call enumeration, but the
  // result vector is constructed from a caller-supplied allocator (SFINAE-gated on its value_type).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11414. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11415. template <typename Dispatch>
  11416. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  11417. uint32_t * pPropertyCount,
  11418. VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
  11419. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11420. {
  11421. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11422. return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
  11423. m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
  11424. }
  11425. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
  // Enhanced-mode overload: returns all mode properties of the given display in a vector, using
  // the standard Vulkan two-call count/data protocol.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result = d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
template <typename DisplayModeProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
                                                Dispatch const & d ) const
{
  // Allocator-taking variant of the enhanced overload above; same two-call enumeration, but the
  // result vector is constructed from a caller-supplied allocator (SFINAE-gated on its value_type).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call: query the count only.
    result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fetch the data.
      properties.resize( propertyCount );
      result = d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );  // retry if the count changed between the two calls
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if the final call reported fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11485. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw-pointer wrapper over vkGetDisplayPlaneCapabilities2KHR; caller owns both structs.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
                                                     VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // C++ wrapper structs are layout-compatible with the C structs, so reinterpret_cast is safe here.
    return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                                                     reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
                                                                     reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  }
  11497. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the capabilities by value and converts failure
  // results into an exception (or error code) via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
    PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
    VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
                                                           reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
                                                           reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  }
  11510. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11511. #if defined( VK_USE_PLATFORM_IOS_MVK )
  11512. //=== VK_MVK_ios_surface ===
  // Raw-pointer wrapper over vkCreateIOSSurfaceMVK (VK_MVK_ios_surface); pAllocator may be nullptr.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
                                                         reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
  11525. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value; the Optional<>
  // allocator converts to nullptr when no host allocation callbacks were given.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
  11542. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same as createIOSSurfaceMVK but wraps the surface in a
  // UniqueHandle that destroys it (with the same allocator) when going out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateIOSSurfaceMVK( m_instance,
                               reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
  11561. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  11562. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11563. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  11564. #if defined( VK_USE_PLATFORM_MACOS_MVK )
  11565. //=== VK_MVK_macos_surface ===
  // Raw-pointer wrapper over vkCreateMacOSSurfaceMVK (VK_MVK_macos_surface); pAllocator may be nullptr.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
                                                           reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
  11578. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value; errors are
  // routed through resultCheck (throws unless exceptions are disabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
  11595. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created surface in a UniqueHandle that destroys
  // it (with the same allocator and dispatcher) at end of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result =
      d.vkCreateMacOSSurfaceMVK( m_instance,
                                 reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
                                 reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                 reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
  11614. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  11615. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11616. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  11617. //=== VK_EXT_debug_utils ===
  // Raw-pointer wrapper over vkSetDebugUtilsObjectNameEXT (VK_EXT_debug_utils).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  }
  11625. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11626. template <typename Dispatch>
  11627. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  11628. Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  11629. {
  11630. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11631. VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
  11632. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
  11633. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  11634. }
  11635. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw-pointer wrapper over vkSetDebugUtilsObjectTagEXT (attaches arbitrary tag data to an object).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  }
  11643. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: reference parameter, failure routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  11653. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Opens a debug label region on this queue (raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
  11661. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Opens a debug label region on this queue (reference form, enhanced mode).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
  11669. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Closes the most recently opened debug label region on this queue.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueEndDebugUtilsLabelEXT( m_queue );
  }
  // Inserts a single (non-region) debug label on this queue (raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
  11683. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Inserts a single (non-region) debug label on this queue (reference form, enhanced mode).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
  11691. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a command opening a debug label region in this command buffer (raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
  11699. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Records a command opening a debug label region in this command buffer (reference form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
  11707. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a command closing the most recently opened debug label region in this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  }
  // Records a single (non-region) debug label into this command buffer (raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
  11721. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Records a single (non-region) debug label into this command buffer (reference form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
  11729. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw-pointer wrapper over vkCreateDebugUtilsMessengerEXT; pAllocator may be nullptr.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT * pMessenger,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                                                  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  }
  11743. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11744. template <typename Dispatch>
  11745. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
  11746. Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
  11747. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  11748. Dispatch const & d ) const
  11749. {
  11750. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11751. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
  11752. VkResult result = d.vkCreateDebugUtilsMessengerEXT(
  11753. m_instance,
  11754. reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
  11755. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  11756. reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
  11757. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
  11758. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
  11759. }
  11760. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created messenger in a UniqueHandle that
  // destroys it (with the same allocator and dispatcher) at end of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
    Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    VkResult result = d.vkCreateDebugUtilsMessengerEXT(
      m_instance,
      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
  11779. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  11780. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys a debug messenger (raw-pointer allocator form); pAllocator may be nullptr.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  11790. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: Optional<> allocator converts to nullptr when absent.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  11802. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for DebugUtilsMessengerEXT handles (raw-pointer allocator form);
  // forwards to vkDestroyDebugUtilsMessengerEXT exactly like destroyDebugUtilsMessengerEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  11812. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for DebugUtilsMessengerEXT (Optional<> allocator, enhanced mode).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  11824. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Injects a message into the debug-utils message stream (raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
  }
  11837. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Injects a message into the debug-utils message stream (reference form, enhanced mode).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
  }
  11850. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11851. #if defined( VK_USE_PLATFORM_ANDROID_KHR )
  11852. //=== VK_ANDROID_external_memory_android_hardware_buffer ===
  // Raw-pointer wrapper over vkGetAndroidHardwareBufferPropertiesANDROID
  // (VK_ANDROID_external_memory_android_hardware_buffer).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer * buffer,
                                                       VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  }
  11863. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties struct by value; failure routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
    VkResult result =
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // StructureChain overload: fills a caller-specified pNext chain whose head is
  // AndroidHardwareBufferPropertiesANDROID and returns the whole chain by value.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // The call writes through the chain head; the linked pNext structures are filled as well.
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
    VkResult result =
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
  11888. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw-pointer wrapper over vkGetMemoryAndroidHardwareBufferANDROID; exports device
  // memory as an AHardwareBuffer written through pBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
                                                   struct AHardwareBuffer ** pBuffer,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  }
  11899. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the exported AHardwareBuffer pointer by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    struct AHardwareBuffer * buffer;
    VkResult result =
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
  }
  11911. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11912. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  11913. //=== VK_EXT_sample_locations ===
  // Records dynamic sample locations state (VK_EXT_sample_locations, raw-pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
  }
  11921. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Records dynamic sample locations state (reference form, enhanced mode).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
  }
  11929. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Queries multisample properties for a sample count (raw-pointer form); the
  // underlying call returns no VkResult, so this wrapper returns void.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                                      VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
  }
  11939. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11940. template <typename Dispatch>
  11941. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
  11942. PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11943. {
  11944. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11945. VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
  11946. d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
  11947. m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
  11948. return multisampleProperties;
  11949. }
  11950. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11951. //=== VK_KHR_get_memory_requirements2 ===
  11952. template <typename Dispatch>
  11953. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
  11954. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  11955. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11956. {
  11957. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11958. d.vkGetImageMemoryRequirements2KHR(
  11959. m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  11960. }
  11961. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11962. template <typename Dispatch>
  11963. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  11964. Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11965. {
  11966. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11967. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  11968. d.vkGetImageMemoryRequirements2KHR(
  11969. m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11970. return memoryRequirements;
  11971. }
  11972. template <typename X, typename Y, typename... Z, typename Dispatch>
  11973. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
  11974. Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11975. {
  11976. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11977. StructureChain<X, Y, Z...> structureChain;
  11978. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  11979. d.vkGetImageMemoryRequirements2KHR(
  11980. m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  11981. return structureChain;
  11982. }
  11983. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  11984. template <typename Dispatch>
  11985. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
  11986. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  11987. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11988. {
  11989. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11990. d.vkGetBufferMemoryRequirements2KHR(
  11991. m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  11992. }
  11993. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11994. template <typename Dispatch>
  11995. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  11996. Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11997. {
  11998. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11999. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  12000. d.vkGetBufferMemoryRequirements2KHR(
  12001. m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  12002. return memoryRequirements;
  12003. }
  12004. template <typename X, typename Y, typename... Z, typename Dispatch>
  12005. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
  12006. Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12007. {
  12008. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12009. StructureChain<X, Y, Z...> structureChain;
  12010. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  12011. d.vkGetBufferMemoryRequirements2KHR(
  12012. m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  12013. return structureChain;
  12014. }
  12015. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12016. template <typename Dispatch>
  12017. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
  12018. uint32_t * pSparseMemoryRequirementCount,
  12019. VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
  12020. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12021. {
  12022. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12023. d.vkGetImageSparseMemoryRequirements2KHR( m_device,
  12024. reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
  12025. pSparseMemoryRequirementCount,
  12026. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  12027. }
  12028. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12029. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  12030. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  12031. Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
  12032. {
  12033. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12034. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  12035. uint32_t sparseMemoryRequirementCount;
  12036. d.vkGetImageSparseMemoryRequirements2KHR(
  12037. m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  12038. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  12039. d.vkGetImageSparseMemoryRequirements2KHR( m_device,
  12040. reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
  12041. &sparseMemoryRequirementCount,
  12042. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  12043. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  12044. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  12045. {
  12046. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  12047. }
  12048. return sparseMemoryRequirements;
  12049. }
  12050. template <typename SparseImageMemoryRequirements2Allocator,
  12051. typename Dispatch,
  12052. typename B1,
  12053. typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  12054. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  12055. Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
  12056. SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
  12057. Dispatch const & d ) const
  12058. {
  12059. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12060. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
  12061. sparseImageMemoryRequirements2Allocator );
  12062. uint32_t sparseMemoryRequirementCount;
  12063. d.vkGetImageSparseMemoryRequirements2KHR(
  12064. m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  12065. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  12066. d.vkGetImageSparseMemoryRequirements2KHR( m_device,
  12067. reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
  12068. &sparseMemoryRequirementCount,
  12069. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  12070. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  12071. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  12072. {
  12073. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  12074. }
  12075. return sparseMemoryRequirements;
  12076. }
  12077. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12078. //=== VK_KHR_acceleration_structure ===
  12079. template <typename Dispatch>
  12080. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12081. Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
  12082. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12083. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
  12084. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12085. {
  12086. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12087. return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
  12088. reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
  12089. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  12090. reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
  12091. }
  12092. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12093. template <typename Dispatch>
  12094. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
  12095. Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
  12096. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12097. Dispatch const & d ) const
  12098. {
  12099. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12100. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
  12101. VkResult result = d.vkCreateAccelerationStructureKHR(
  12102. m_device,
  12103. reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
  12104. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12105. reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
  12106. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
  12107. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
  12108. }
  12109. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  12110. template <typename Dispatch>
  12111. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
  12112. Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
  12113. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12114. Dispatch const & d ) const
  12115. {
  12116. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12117. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
  12118. VkResult result = d.vkCreateAccelerationStructureKHR(
  12119. m_device,
  12120. reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
  12121. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12122. reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
  12123. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
  12124. return createResultValueType(
  12125. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12126. UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  12127. }
  12128. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  12129. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12130. template <typename Dispatch>
  12131. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
  12132. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12133. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12134. {
  12135. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12136. d.vkDestroyAccelerationStructureKHR(
  12137. m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  12138. }
  12139. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12140. template <typename Dispatch>
  12141. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
  12142. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12143. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12144. {
  12145. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12146. d.vkDestroyAccelerationStructureKHR(
  12147. m_device,
  12148. static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
  12149. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  12150. }
  12151. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12152. template <typename Dispatch>
  12153. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
  12154. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12155. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12156. {
  12157. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12158. d.vkDestroyAccelerationStructureKHR(
  12159. m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  12160. }
  12161. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12162. template <typename Dispatch>
  12163. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
  12164. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12165. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12166. {
  12167. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12168. d.vkDestroyAccelerationStructureKHR(
  12169. m_device,
  12170. static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
  12171. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  12172. }
  12173. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12174. template <typename Dispatch>
  12175. VULKAN_HPP_INLINE void
  12176. CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount,
  12177. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
  12178. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
  12179. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12180. {
  12181. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12182. d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
  12183. infoCount,
  12184. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
  12185. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  12186. }
  12187. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12188. template <typename Dispatch>
  12189. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
  12190. ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
  12191. ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
  12192. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  12193. {
  12194. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12195. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12196. VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
  12197. # else
  12198. if ( infos.size() != pBuildRangeInfos.size() )
  12199. {
  12200. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
  12201. }
  12202. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12203. d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
  12204. infos.size(),
  12205. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
  12206. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  12207. }
  12208. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12209. template <typename Dispatch>
  12210. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
  12211. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
  12212. const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
  12213. const uint32_t * pIndirectStrides,
  12214. const uint32_t * const * ppMaxPrimitiveCounts,
  12215. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12216. {
  12217. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12218. d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
  12219. infoCount,
  12220. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
  12221. reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
  12222. pIndirectStrides,
  12223. ppMaxPrimitiveCounts );
  12224. }
  12225. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12226. template <typename Dispatch>
  12227. VULKAN_HPP_INLINE void
  12228. CommandBuffer::buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
  12229. ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
  12230. ArrayProxy<const uint32_t> const & indirectStrides,
  12231. ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
  12232. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  12233. {
  12234. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12235. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12236. VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
  12237. VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
  12238. VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
  12239. # else
  12240. if ( infos.size() != indirectDeviceAddresses.size() )
  12241. {
  12242. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
  12243. }
  12244. if ( infos.size() != indirectStrides.size() )
  12245. {
  12246. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
  12247. }
  12248. if ( infos.size() != pMaxPrimitiveCounts.size() )
  12249. {
  12250. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
  12251. }
  12252. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12253. d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
  12254. infos.size(),
  12255. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
  12256. reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
  12257. indirectStrides.data(),
  12258. pMaxPrimitiveCounts.data() );
  12259. }
  12260. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12261. template <typename Dispatch>
  12262. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12263. Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12264. uint32_t infoCount,
  12265. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
  12266. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
  12267. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12268. {
  12269. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12270. return static_cast<Result>(
  12271. d.vkBuildAccelerationStructuresKHR( m_device,
  12272. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12273. infoCount,
  12274. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
  12275. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  12276. }
  12277. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12278. template <typename Dispatch>
  12279. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12280. Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12281. ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
  12282. ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
  12283. Dispatch const & d ) const
  12284. {
  12285. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12286. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12287. VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
  12288. # else
  12289. if ( infos.size() != pBuildRangeInfos.size() )
  12290. {
  12291. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
  12292. }
  12293. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12294. VkResult result =
  12295. d.vkBuildAccelerationStructuresKHR( m_device,
  12296. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12297. infos.size(),
  12298. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
  12299. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  12300. resultCheck(
  12301. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12302. VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
  12303. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12304. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12305. }
  12306. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12307. template <typename Dispatch>
  12308. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12309. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
  12310. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12311. {
  12312. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12313. return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
  12314. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  12315. }
  12316. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12317. template <typename Dispatch>
  12318. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12319. Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12320. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
  12321. Dispatch const & d ) const
  12322. {
  12323. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12324. VkResult result = d.vkCopyAccelerationStructureKHR(
  12325. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  12326. resultCheck(
  12327. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12328. VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
  12329. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12330. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12331. }
  12332. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12333. template <typename Dispatch>
  12334. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12335. Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12336. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
  12337. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12338. {
  12339. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12340. return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
  12341. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  12342. }
  12343. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12344. template <typename Dispatch>
  12345. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12346. Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12347. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
  12348. Dispatch const & d ) const
  12349. {
  12350. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12351. VkResult result = d.vkCopyAccelerationStructureToMemoryKHR(
  12352. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  12353. resultCheck(
  12354. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12355. VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
  12356. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12357. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12358. }
  12359. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12360. template <typename Dispatch>
  12361. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12362. Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12363. const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
  12364. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12365. {
  12366. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12367. return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
  12368. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  12369. }
  12370. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12371. template <typename Dispatch>
  12372. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12373. Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12374. const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
  12375. Dispatch const & d ) const
  12376. {
  12377. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12378. VkResult result = d.vkCopyMemoryToAccelerationStructureKHR(
  12379. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  12380. resultCheck(
  12381. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12382. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
  12383. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12384. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12385. }
  12386. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12387. template <typename Dispatch>
  12388. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12389. Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
  12390. const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
  12391. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12392. size_t dataSize,
  12393. void * pData,
  12394. size_t stride,
  12395. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12396. {
  12397. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12398. return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12399. accelerationStructureCount,
  12400. reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
  12401. static_cast<VkQueryType>( queryType ),
  12402. dataSize,
  12403. pData,
  12404. stride ) );
  12405. }
  12406. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12407. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  12408. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
  12409. Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12410. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12411. size_t dataSize,
  12412. size_t stride,
  12413. Dispatch const & d ) const
  12414. {
  12415. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12416. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  12417. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  12418. VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12419. accelerationStructures.size(),
  12420. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12421. static_cast<VkQueryType>( queryType ),
  12422. data.size() * sizeof( DataType ),
  12423. reinterpret_cast<void *>( data.data() ),
  12424. stride );
  12425. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  12426. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  12427. }
  12428. template <typename DataType, typename Dispatch>
  12429. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  12430. Device::writeAccelerationStructuresPropertyKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12431. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12432. size_t stride,
  12433. Dispatch const & d ) const
  12434. {
  12435. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12436. DataType data;
  12437. VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12438. accelerationStructures.size(),
  12439. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12440. static_cast<VkQueryType>( queryType ),
  12441. sizeof( DataType ),
  12442. reinterpret_cast<void *>( &data ),
  12443. stride );
  12444. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
  12445. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  12446. }
  12447. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12448. template <typename Dispatch>
  12449. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
  12450. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12451. {
  12452. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12453. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  12454. }
  12455. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12456. template <typename Dispatch>
  12457. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
  12458. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12459. {
  12460. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12461. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  12462. }
  12463. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12464. template <typename Dispatch>
  12465. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
  12466. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12467. {
  12468. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12469. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  12470. }
  12471. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12472. template <typename Dispatch>
  12473. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
  12474. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12475. {
  12476. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12477. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  12478. }
  12479. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12480. template <typename Dispatch>
  12481. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
  12482. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12483. {
  12484. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12485. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  12486. }
  12487. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12488. template <typename Dispatch>
  12489. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
  12490. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12491. {
  12492. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12493. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  12494. }
  12495. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12496. template <typename Dispatch>
  12497. VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
  12498. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12499. {
  12500. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12501. return static_cast<DeviceAddress>(
  12502. d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  12503. }
  12504. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12505. template <typename Dispatch>
  12506. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
  12507. Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
  12508. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12509. {
  12510. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12511. VkDeviceAddress result =
  12512. d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
  12513. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  12514. }
  12515. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12516. template <typename Dispatch>
  12517. VULKAN_HPP_INLINE void
  12518. CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
  12519. const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
  12520. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12521. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  12522. uint32_t firstQuery,
  12523. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12524. {
  12525. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12526. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
  12527. accelerationStructureCount,
  12528. reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
  12529. static_cast<VkQueryType>( queryType ),
  12530. static_cast<VkQueryPool>( queryPool ),
  12531. firstQuery );
  12532. }
  12533. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12534. template <typename Dispatch>
  12535. VULKAN_HPP_INLINE void
  12536. CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12537. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12538. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  12539. uint32_t firstQuery,
  12540. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12541. {
  12542. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12543. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
  12544. accelerationStructures.size(),
  12545. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12546. static_cast<VkQueryType>( queryType ),
  12547. static_cast<VkQueryPool>( queryPool ),
  12548. firstQuery );
  12549. }
  12550. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12551. template <typename Dispatch>
  12552. VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
  12553. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
  12554. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12555. {
  12556. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12557. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
  12558. reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
  12559. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  12560. }
  12561. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12562. template <typename Dispatch>
  12563. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
  12564. Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
  12565. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12566. {
  12567. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12568. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
  12569. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
  12570. reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
  12571. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
  12572. return compatibility;
  12573. }
  12574. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12575. template <typename Dispatch>
  12576. VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  12577. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
  12578. const uint32_t * pMaxPrimitiveCounts,
  12579. VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
  12580. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12581. {
  12582. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12583. d.vkGetAccelerationStructureBuildSizesKHR( m_device,
  12584. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  12585. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
  12586. pMaxPrimitiveCounts,
  12587. reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  12588. }
  12589. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12590. template <typename Dispatch>
  12591. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
  12592. Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  12593. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
  12594. ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
  12595. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  12596. {
  12597. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12598. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12599. VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
  12600. # else
  12601. if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
  12602. {
  12603. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
  12604. }
  12605. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12606. VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
  12607. d.vkGetAccelerationStructureBuildSizesKHR( m_device,
  12608. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  12609. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
  12610. maxPrimitiveCounts.data(),
  12611. reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
  12612. return sizeInfo;
  12613. }
  12614. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12615. //=== VK_KHR_sampler_ycbcr_conversion ===
  12616. template <typename Dispatch>
  12617. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12618. Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
  12619. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12620. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
  12621. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12622. {
  12623. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12624. return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
  12625. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
  12626. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  12627. reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  12628. }
  12629. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12630. template <typename Dispatch>
  12631. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
  12632. Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  12633. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12634. Dispatch const & d ) const
  12635. {
  12636. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12637. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  12638. VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
  12639. m_device,
  12640. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  12641. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12642. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  12643. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
  12644. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
  12645. }
  12646. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  12647. template <typename Dispatch>
  12648. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
  12649. Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  12650. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12651. Dispatch const & d ) const
  12652. {
  12653. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12654. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  12655. VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
  12656. m_device,
  12657. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  12658. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12659. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  12660. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
  12661. return createResultValueType(
  12662. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12663. UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  12664. }
  12665. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  12666. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12667. template <typename Dispatch>
  12668. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  12669. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12670. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12671. {
  12672. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12673. d.vkDestroySamplerYcbcrConversionKHR(
  12674. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  12675. }
  12676. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12677. template <typename Dispatch>
  12678. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  12679. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12680. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12681. {
  12682. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12683. d.vkDestroySamplerYcbcrConversionKHR(
  12684. m_device,
  12685. static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
  12686. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  12687. }
  12688. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12689. //=== VK_KHR_bind_memory2 ===
  12690. template <typename Dispatch>
  12691. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
  12692. const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
  12693. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12694. {
  12695. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12696. return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  12697. }
  12698. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12699. template <typename Dispatch>
  12700. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  12701. Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
  12702. {
  12703. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12704. VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
  12705. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
  12706. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  12707. }
  12708. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12709. template <typename Dispatch>
  12710. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount,
  12711. const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
  12712. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12713. {
  12714. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12715. return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  12716. }
  12717. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12718. template <typename Dispatch>
  12719. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  12720. Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  12721. {
  12722. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12723. VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
  12724. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
  12725. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  12726. }
  12727. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12728. //=== VK_EXT_image_drm_format_modifier ===
  12729. template <typename Dispatch>
  12730. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
  12731. VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12732. {
  12733. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12734. return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
  12735. m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  12736. }
  12737. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12738. template <typename Dispatch>
  12739. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
  12740. Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  12741. {
  12742. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12743. VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
  12744. VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT(
  12745. m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
  12746. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
  12747. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  12748. }
  12749. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12750. //=== VK_EXT_validation_cache ===
  12751. template <typename Dispatch>
  12752. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
  12753. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12754. VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
  12755. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12756. {
  12757. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12758. return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
  12759. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
  12760. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  12761. reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
  12762. }
  12763. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12764. template <typename Dispatch>
  12765. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
  12766. Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
  12767. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12768. Dispatch const & d ) const
  12769. {
  12770. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12771. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  12772. VkResult result = d.vkCreateValidationCacheEXT(
  12773. m_device,
  12774. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
  12775. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12776. reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
  12777. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
  12778. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
  12779. }
  12780. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  12781. template <typename Dispatch>
  12782. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
  12783. Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
  12784. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12785. Dispatch const & d ) const
  12786. {
  12787. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12788. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  12789. VkResult result = d.vkCreateValidationCacheEXT(
  12790. m_device,
  12791. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
  12792. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  12793. reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
  12794. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
  12795. return createResultValueType(
  12796. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12797. UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  12798. }
  12799. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  12800. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12801. template <typename Dispatch>
  12802. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  12803. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12804. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12805. {
  12806. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12807. d.vkDestroyValidationCacheEXT(
  12808. m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  12809. }
  12810. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12811. template <typename Dispatch>
  12812. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  12813. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12814. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12815. {
  12816. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12817. d.vkDestroyValidationCacheEXT(
  12818. m_device,
  12819. static_cast<VkValidationCacheEXT>( validationCache ),
  12820. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  12821. }
  12822. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12823. template <typename Dispatch>
  12824. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  12825. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12826. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12827. {
  12828. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12829. d.vkDestroyValidationCacheEXT(
  12830. m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  12831. }
  12832. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12833. template <typename Dispatch>
  12834. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  12835. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12836. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12837. {
  12838. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12839. d.vkDestroyValidationCacheEXT(
  12840. m_device,
  12841. static_cast<VkValidationCacheEXT>( validationCache ),
  12842. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  12843. }
  12844. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12845. template <typename Dispatch>
  12846. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
  12847. uint32_t srcCacheCount,
  12848. const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
  12849. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12850. {
  12851. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12852. return static_cast<Result>( d.vkMergeValidationCachesEXT(
  12853. m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  12854. }
  12855. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12856. template <typename Dispatch>
  12857. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT(
  12858. VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d ) const
  12859. {
  12860. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12861. VkResult result = d.vkMergeValidationCachesEXT(
  12862. m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
  12863. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
  12864. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  12865. }
  12866. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12867. template <typename Dispatch>
  12868. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  12869. size_t * pDataSize,
  12870. void * pData,
  12871. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12872. {
  12873. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12874. return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  12875. }
  12876. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Uint8_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Two-call enumeration: first query the required size (pData == nullptr), then fetch the bytes.
    // If the second call returns VK_INCOMPLETE (the required size changed between the two calls),
    // the loop repeats until a stable result is obtained.
    std::vector<uint8_t, Uint8_tAllocator> data;
    size_t dataSize;
    VkResult result;
    do
    {
      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
      if ( ( result == VK_SUCCESS ) && dataSize )
      {
        data.resize( dataSize );
        result =
          d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // Shrink the vector if the final call wrote fewer bytes than were allocated.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  template <typename Uint8_tAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
    Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Allocator-aware variant: identical two-call enumeration, but the result vector is
    // constructed with the caller-supplied allocator.
    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
    size_t dataSize;
    VkResult result;
    do
    {
      // First call (pData == nullptr) reports the required size; second call fetches the bytes.
      result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
      if ( ( result == VK_SUCCESS ) && dataSize )
      {
        data.resize( dataSize );
        result =
          d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry while the size keeps changing between the two calls
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
    VULKAN_HPP_ASSERT( dataSize <= data.size() );
    // Shrink the vector if the final call wrote fewer bytes than were allocated.
    if ( dataSize < data.size() )
    {
      data.resize( dataSize );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  12932. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12933. //=== VK_NV_shading_rate_image ===
  12934. template <typename Dispatch>
  12935. VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
  12936. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  12937. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12938. {
  12939. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12940. d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  12941. }
  12942. template <typename Dispatch>
  12943. VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
  12944. uint32_t viewportCount,
  12945. const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
  12946. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12947. {
  12948. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12949. d.vkCmdSetViewportShadingRatePaletteNV(
  12950. m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
  12951. }
  12952. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12953. template <typename Dispatch>
  12954. VULKAN_HPP_INLINE void
  12955. CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
  12956. ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
  12957. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12958. {
  12959. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12960. d.vkCmdSetViewportShadingRatePaletteNV(
  12961. m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
  12962. }
  12963. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12964. template <typename Dispatch>
  12965. VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
  12966. uint32_t customSampleOrderCount,
  12967. const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
  12968. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12969. {
  12970. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12971. d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
  12972. static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
  12973. customSampleOrderCount,
  12974. reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
  12975. }
  12976. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12977. template <typename Dispatch>
  12978. VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType,
  12979. ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
  12980. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12981. {
  12982. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12983. d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
  12984. static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
  12985. customSampleOrders.size(),
  12986. reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  12987. }
  12988. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  12989. //=== VK_NV_ray_tracing ===
  12990. template <typename Dispatch>
  12991. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12992. Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
  12993. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12994. VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructure,
  12995. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12996. {
  12997. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12998. return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
  12999. reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
  13000. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  13001. reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  13002. }
  13003. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13004. template <typename Dispatch>
  13005. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
  13006. Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
  13007. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13008. Dispatch const & d ) const
  13009. {
  13010. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13011. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
  13012. VkResult result = d.vkCreateAccelerationStructureNV(
  13013. m_device,
  13014. reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
  13015. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13016. reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
  13017. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
  13018. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
  13019. }
  13020. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  13021. template <typename Dispatch>
  13022. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
  13023. Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
  13024. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13025. Dispatch const & d ) const
  13026. {
  13027. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13028. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
  13029. VkResult result = d.vkCreateAccelerationStructureNV(
  13030. m_device,
  13031. reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
  13032. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13033. reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
  13034. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
  13035. return createResultValueType(
  13036. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13037. UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  13038. }
  13039. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  13040. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13041. template <typename Dispatch>
  13042. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
  13043. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13044. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13045. {
  13046. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13047. d.vkDestroyAccelerationStructureNV(
  13048. m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  13049. }
  13050. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13051. template <typename Dispatch>
  13052. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
  13053. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13054. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13055. {
  13056. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13057. d.vkDestroyAccelerationStructureNV(
  13058. m_device,
  13059. static_cast<VkAccelerationStructureNV>( accelerationStructure ),
  13060. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13061. }
  13062. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13063. template <typename Dispatch>
  13064. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
  13065. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13066. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13067. {
  13068. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13069. d.vkDestroyAccelerationStructureNV(
  13070. m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  13071. }
  13072. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13073. template <typename Dispatch>
  13074. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
  13075. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13076. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13077. {
  13078. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13079. d.vkDestroyAccelerationStructureNV(
  13080. m_device,
  13081. static_cast<VkAccelerationStructureNV>( accelerationStructure ),
  13082. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13083. }
  13084. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13085. template <typename Dispatch>
  13086. VULKAN_HPP_INLINE void
  13087. Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
  13088. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR * pMemoryRequirements,
  13089. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13090. {
  13091. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13092. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
  13093. reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
  13094. reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  13095. }
  13096. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13097. template <typename Dispatch>
  13098. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
  13099. Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
  13100. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13101. {
  13102. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13103. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
  13104. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
  13105. reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
  13106. reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  13107. return memoryRequirements;
  13108. }
  13109. template <typename X, typename Y, typename... Z, typename Dispatch>
  13110. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
  13111. Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
  13112. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13113. {
  13114. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13115. StructureChain<X, Y, Z...> structureChain;
  13116. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
  13117. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
  13118. reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
  13119. reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  13120. return structureChain;
  13121. }
  13122. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13123. template <typename Dispatch>
  13124. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
  13125. uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13126. {
  13127. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13128. return static_cast<Result>(
  13129. d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  13130. }
  13131. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13132. template <typename Dispatch>
  13133. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13134. Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos,
  13135. Dispatch const & d ) const
  13136. {
  13137. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13138. VkResult result = d.vkBindAccelerationStructureMemoryNV(
  13139. m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
  13140. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
  13141. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13142. }
  13143. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13144. template <typename Dispatch>
  13145. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
  13146. VULKAN_HPP_NAMESPACE::Buffer instanceData,
  13147. VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
  13148. VULKAN_HPP_NAMESPACE::Bool32 update,
  13149. VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
  13150. VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
  13151. VULKAN_HPP_NAMESPACE::Buffer scratch,
  13152. VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
  13153. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13154. {
  13155. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13156. d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
  13157. reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
  13158. static_cast<VkBuffer>( instanceData ),
  13159. static_cast<VkDeviceSize>( instanceOffset ),
  13160. static_cast<VkBool32>( update ),
  13161. static_cast<VkAccelerationStructureNV>( dst ),
  13162. static_cast<VkAccelerationStructureNV>( src ),
  13163. static_cast<VkBuffer>( scratch ),
  13164. static_cast<VkDeviceSize>( scratchOffset ) );
  13165. }
  13166. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13167. template <typename Dispatch>
  13168. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
  13169. VULKAN_HPP_NAMESPACE::Buffer instanceData,
  13170. VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset,
  13171. VULKAN_HPP_NAMESPACE::Bool32 update,
  13172. VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
  13173. VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
  13174. VULKAN_HPP_NAMESPACE::Buffer scratch,
  13175. VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset,
  13176. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13177. {
  13178. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13179. d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
  13180. reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
  13181. static_cast<VkBuffer>( instanceData ),
  13182. static_cast<VkDeviceSize>( instanceOffset ),
  13183. static_cast<VkBool32>( update ),
  13184. static_cast<VkAccelerationStructureNV>( dst ),
  13185. static_cast<VkAccelerationStructureNV>( src ),
  13186. static_cast<VkBuffer>( scratch ),
  13187. static_cast<VkDeviceSize>( scratchOffset ) );
  13188. }
  13189. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13190. template <typename Dispatch>
  13191. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst,
  13192. VULKAN_HPP_NAMESPACE::AccelerationStructureNV src,
  13193. VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
  13194. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13195. {
  13196. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13197. d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
  13198. static_cast<VkAccelerationStructureNV>( dst ),
  13199. static_cast<VkAccelerationStructureNV>( src ),
  13200. static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  13201. }
  13202. template <typename Dispatch>
  13203. VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer,
  13204. VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
  13205. VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer,
  13206. VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
  13207. VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
  13208. VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer,
  13209. VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
  13210. VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
  13211. VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer,
  13212. VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
  13213. VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
  13214. uint32_t width,
  13215. uint32_t height,
  13216. uint32_t depth,
  13217. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13218. {
  13219. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13220. d.vkCmdTraceRaysNV( m_commandBuffer,
  13221. static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
  13222. static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
  13223. static_cast<VkBuffer>( missShaderBindingTableBuffer ),
  13224. static_cast<VkDeviceSize>( missShaderBindingOffset ),
  13225. static_cast<VkDeviceSize>( missShaderBindingStride ),
  13226. static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
  13227. static_cast<VkDeviceSize>( hitShaderBindingOffset ),
  13228. static_cast<VkDeviceSize>( hitShaderBindingStride ),
  13229. static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
  13230. static_cast<VkDeviceSize>( callableShaderBindingOffset ),
  13231. static_cast<VkDeviceSize>( callableShaderBindingStride ),
  13232. width,
  13233. height,
  13234. depth );
  13235. }
  13236. template <typename Dispatch>
  13237. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13238. uint32_t createInfoCount,
  13239. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
  13240. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13241. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  13242. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13243. {
  13244. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13245. return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
  13246. static_cast<VkPipelineCache>( pipelineCache ),
  13247. createInfoCount,
  13248. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
  13249. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  13250. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  13251. }
  13252. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13253. template <typename PipelineAllocator, typename Dispatch>
  13254. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  13255. Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13256. ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13257. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13258. Dispatch const & d ) const
  13259. {
  13260. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13261. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  13262. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13263. m_device,
  13264. static_cast<VkPipelineCache>( pipelineCache ),
  13265. createInfos.size(),
  13266. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13267. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13268. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13269. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13270. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
  13271. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13272. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13273. }
  13274. template <typename PipelineAllocator,
  13275. typename Dispatch,
  13276. typename B0,
  13277. typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  13278. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  13279. Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13280. ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13281. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13282. PipelineAllocator & pipelineAllocator,
  13283. Dispatch const & d ) const
  13284. {
  13285. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13286. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  13287. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13288. m_device,
  13289. static_cast<VkPipelineCache>( pipelineCache ),
  13290. createInfos.size(),
  13291. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13292. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13293. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13294. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13295. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
  13296. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13297. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13298. }
  13299. template <typename Dispatch>
  13300. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
  13301. Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13302. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
  13303. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13304. Dispatch const & d ) const
  13305. {
  13306. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13307. VULKAN_HPP_NAMESPACE::Pipeline pipeline;
  13308. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13309. m_device,
  13310. static_cast<VkPipelineCache>( pipelineCache ),
  13311. 1,
  13312. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
  13313. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13314. reinterpret_cast<VkPipeline *>( &pipeline ) );
  13315. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13316. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
  13317. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13318. return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  13319. }
  13320. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  13321. template <typename Dispatch, typename PipelineAllocator>
  13322. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  13323. Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13324. ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13325. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13326. Dispatch const & d ) const
  13327. {
  13328. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13329. std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  13330. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13331. m_device,
  13332. static_cast<VkPipelineCache>( pipelineCache ),
  13333. createInfos.size(),
  13334. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13335. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13336. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13337. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13338. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
  13339. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13340. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
  13341. uniquePipelines.reserve( createInfos.size() );
  13342. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  13343. for ( auto const & pipeline : pipelines )
  13344. {
  13345. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  13346. }
  13347. return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
  13348. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  13349. }
  13350. template <typename Dispatch,
  13351. typename PipelineAllocator,
  13352. typename B0,
  13353. typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  13354. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  13355. Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13356. ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13357. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13358. PipelineAllocator & pipelineAllocator,
  13359. Dispatch const & d ) const
  13360. {
  13361. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13362. std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  13363. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13364. m_device,
  13365. static_cast<VkPipelineCache>( pipelineCache ),
  13366. createInfos.size(),
  13367. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13368. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13369. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13370. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13371. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
  13372. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13373. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
  13374. uniquePipelines.reserve( createInfos.size() );
  13375. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  13376. for ( auto const & pipeline : pipelines )
  13377. {
  13378. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  13379. }
  13380. return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
  13381. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  13382. }
  13383. template <typename Dispatch>
  13384. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
  13385. Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13386. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
  13387. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13388. Dispatch const & d ) const
  13389. {
  13390. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13391. VULKAN_HPP_NAMESPACE::Pipeline pipeline;
  13392. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13393. m_device,
  13394. static_cast<VkPipelineCache>( pipelineCache ),
  13395. 1,
  13396. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
  13397. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13398. reinterpret_cast<VkPipeline *>( &pipeline ) );
  13399. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13400. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
  13401. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13402. return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
  13403. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13404. UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  13405. }
  13406. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  13407. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13408. template <typename Dispatch>
  13409. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  13410. uint32_t firstGroup,
  13411. uint32_t groupCount,
  13412. size_t dataSize,
  13413. void * pData,
  13414. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13415. {
  13416. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13417. return static_cast<Result>(
  13418. d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  13419. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the shader-group handles as a std::vector<DataType>.
  // `dataSize` must be a multiple of sizeof( DataType ); the vector is sized accordingly.
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Debug-only check that the requested byte count maps to a whole number of elements.
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
    // Throws (or returns the error, when exceptions are disabled) on any non-success code.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Convenience overload for a single value: fetches sizeof( DataType ) bytes covering
  // groups [firstGroup, firstGroup + groupCount) into one DataType and returns it.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkGetAccelerationStructureHandleNV: writes the opaque handle of the
  // acceleration structure into `pData` (caller provides at least `dataSize` bytes).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                                                                          size_t                                        dataSize,
                                                                                          void *                                        pData,
                                                                                          Dispatch const &                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the handle bytes as a std::vector<DataType>.
  // `dataSize` must be a multiple of sizeof( DataType ).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Debug-only check that dataSize maps to a whole number of DataType elements.
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult result = d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Convenience overload: fetches exactly sizeof( DataType ) bytes into one DataType.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkCmdWriteAccelerationStructuresPropertiesNV: records a command that
  // writes `queryType` properties of the given acceleration structures into `queryPool`
  // starting at `firstQuery` (one query per structure).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t                                               accelerationStructureCount,
                                                                                 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
                                                                                 VULKAN_HPP_NAMESPACE::QueryType                        queryType,
                                                                                 VULKAN_HPP_NAMESPACE::QueryPool                        queryPool,
                                                                                 uint32_t                                               firstQuery,
                                                                                 Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructureCount,
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same command, but count and pointer are derived from an
  // ArrayProxy, so callers can pass containers or initializer lists directly.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
                                                            VULKAN_HPP_NAMESPACE::QueryType                                         queryType,
                                                            VULKAN_HPP_NAMESPACE::QueryPool                                         queryPool,
                                                            uint32_t                                                                firstQuery,
                                                            Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructures.size(),
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Raw wrapper for vkCompileDeferredNV: compiles the deferred shader at index `shader`
  // of `pipeline`; returns the raw result code.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                           uint32_t                       shader,
                                                                           Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  }
#else
  // Enhanced-mode variant: checks the result via resultCheck (throwing on failure when
  // exceptions are enabled) and returns a void ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_maintenance3 ===

  // Raw wrapper for vkGetDescriptorSetLayoutSupportKHR: queries whether a descriptor set
  // layout described by *pCreateInfo can be created, writing the answer into *pSupport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                                                   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport *           pSupport,
                                                                   Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the DescriptorSetLayoutSupport struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }

  // StructureChain overload: fills the DescriptorSetLayoutSupport link of a caller-chosen
  // pNext chain, so extension structs chained behind it are populated as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // The call writes through the chain element in place; the whole chain is returned.
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_draw_indirect_count ===

  // Records vkCmdDrawIndirectCountKHR: indirect draw where the draw count is read from
  // `countBuffer` at `countBufferOffset`, clamped to `maxDrawCount`; draw parameters are
  // read from `buffer` at `offset` with `stride` bytes between records.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t                         maxDrawCount,
                                                              uint32_t                         stride,
                                                              Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }

  // Records vkCmdDrawIndexedIndirectCountKHR: indexed variant of the count-buffer-driven
  // indirect draw above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t                         maxDrawCount,
                                                                     uint32_t                         stride,
                                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }
  //=== VK_EXT_external_memory_host ===

  // Raw wrapper for vkGetMemoryHostPointerPropertiesEXT: queries memory-type support for
  // importing the host allocation at `pHostPointer` with the given external handle type.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void *                                            pHostPointer,
                                               VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT *  pMemoryHostPointerProperties,
                                               Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                       pHostPointer,
                                                                       reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the properties by value; errors are routed through
  // resultCheck (throwing when exceptions are enabled).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void *                                            pHostPointer,
                                               Dispatch const &                                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
    VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device,
                                                             static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                             pHostPointer,
                                                             reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_AMD_buffer_marker ===

  // Records vkCmdWriteBufferMarkerAMD: after the given pipeline stage completes, writes
  // the 32-bit `marker` into `dstBuffer` at `dstOffset` (useful for GPU crash forensics).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                              VULKAN_HPP_NAMESPACE::Buffer                dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize            dstOffset,
                                                              uint32_t                                    marker,
                                                              Dispatch const &                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
                                 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 marker );
  }
  //=== VK_EXT_calibrated_timestamps ===

  // Raw wrapper for vkGetPhysicalDeviceCalibrateableTimeDomainsEXT. Standard two-call
  // pattern: pass pTimeDomains == nullptr to query the count, then call again to fill.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t *                            pTimeDomainCount,
                                                                                                VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
                                                                                                Dispatch const &                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: performs the two-call enumeration internally and returns the
  // time domains as a vector.
  template <typename TimeDomainEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
    uint32_t                                                                 timeDomainCount;
    VkResult                                                                 result;
    // Loop until the count is stable: VK_INCOMPLETE means the array was too small because
    // the count changed between the two calls, so re-query and retry.
    do
    {
      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
      if ( ( result == VK_SUCCESS ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result =
          d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
    // The final call may report fewer elements than were allocated; shrink to fit.
    if ( timeDomainCount < timeDomains.size() )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
  }

  // Same enumeration, but the result vector is constructed from a caller-provided
  // allocator (B1 is constrained to allocators of TimeDomainEXT).
  template <typename TimeDomainEXTAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
    uint32_t                                                                 timeDomainCount;
    VkResult                                                                 result;
    // Two-call enumeration loop, identical to the overload above.
    do
    {
      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
      if ( ( result == VK_SUCCESS ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result =
          d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
    if ( timeDomainCount < timeDomains.size() )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Raw wrapper for vkGetCalibratedTimestampsEXT: samples `timestampCount` timestamps
  // (one per entry of pTimestampInfos) into pTimestamps and writes the maximum deviation
  // in nanoseconds between them into *pMaxDeviation.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t                                                 timestampCount,
                                                                                    const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
                                                                                    uint64_t *                                               pTimestamps,
                                                                                    uint64_t *                                               pMaxDeviation,
                                                                                    Dispatch const &                                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
      m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns { timestamps vector, max deviation } as a pair.
  template <typename Uint64_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // piecewise_construct sizes the vector to one slot per timestamp info and
    // zero-initializes the deviation in a single pair construction.
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps   = data.first;
    uint64_t &                                 maxDeviation = data.second;
    VkResult result = d.vkGetCalibratedTimestampsEXT(
      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Same overload with a caller-provided allocator for the timestamp vector
  // (B0 is constrained to allocators of uint64_t).
  template <typename Uint64_tAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
                                        Uint64_tAllocator &                                                        uint64_tAllocator,
                                        Dispatch const &                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps   = data.first;
    uint64_t &                                 maxDeviation = data.second;
    VkResult result = d.vkGetCalibratedTimestampsEXT(
      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Single-timestamp convenience: returns { timestamp, max deviation } for one info.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
    Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::pair<uint64_t, uint64_t> data;
    uint64_t & timestamp    = data.first;
    uint64_t & maxDeviation = data.second;
    VkResult result =
      d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_NV_mesh_shader ===

  // Records vkCmdDrawMeshTasksNV: launches `taskCount` mesh-task workgroups starting at
  // index `firstTask`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  }

  // Records vkCmdDrawMeshTasksIndirectNV: `drawCount` mesh-task draws whose parameters
  // are read from `buffer` at `offset`, `stride` bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                 uint32_t                         drawCount,
                                                                 uint32_t                         stride,
                                                                 Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }

  // Records vkCmdDrawMeshTasksIndirectCountNV: like the indirect variant, but the draw
  // count is read from `countBuffer` at `countBufferOffset`, clamped to `maxDrawCount`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                      VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                      uint32_t                         maxDrawCount,
                                                                      uint32_t                         stride,
                                                                      Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
                                         static_cast<VkBuffer>( buffer ),
                                         static_cast<VkDeviceSize>( offset ),
                                         static_cast<VkBuffer>( countBuffer ),
                                         static_cast<VkDeviceSize>( countBufferOffset ),
                                         maxDrawCount,
                                         stride );
  }
  //=== VK_NV_scissor_exclusive ===

  // Records vkCmdSetExclusiveScissorNV: sets `exclusiveScissorCount` exclusive scissor
  // rectangles starting at slot `firstExclusiveScissor`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t                               firstExclusiveScissor,
                                                               uint32_t                               exclusiveScissorCount,
                                                               const VULKAN_HPP_NAMESPACE::Rect2D *   pExclusiveScissors,
                                                               Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: count and pointer are derived from an ArrayProxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t                                              firstExclusiveScissor,
                                                               ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
                                                               Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorNV(
      m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_NV_device_diagnostic_checkpoints ===

  // Records vkCmdSetCheckpointNV: tags subsequent commands with an opaque marker pointer
  // that can be retrieved after a device loss via getCheckpointDataNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the marker by const reference and passes its address.
  // NOTE(review): the driver stores only the pointer — `checkpointMarker` must outlive
  // any retrieval of the checkpoint data; confirm at call sites.
  template <typename CheckpointMarkerType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Raw wrapper for vkGetQueueCheckpointDataNV (two-call pattern: pCheckpointData ==
  // nullptr queries the count).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t *                               pCheckpointDataCount,
                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                                                     Dispatch const &                         d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns all checkpoint data as a vector. This entry point
  // returns void, so no retry loop is needed — count and fill are done once each.
  template <typename CheckpointDataNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
    uint32_t                                                                       checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    // Shrink in case fewer entries were written than the first call reported.
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }

  // Same retrieval with a caller-provided allocator for the result vector
  // (B1 is constrained to allocators of CheckpointDataNV).
  template <typename CheckpointDataNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
    uint32_t                                                                       checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_timeline_semaphore ===

  // Raw wrapper for vkGetSemaphoreCounterValueKHR: reads the current counter value of a
  // timeline semaphore into *pValue.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                                                     uint64_t *                      pValue,
                                                                                     Dispatch const &                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the counter value directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t value;
    VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Raw wrapper for vkWaitSemaphoresKHR: blocks until the wait condition in *pWaitInfo
  // is satisfied or `timeout` nanoseconds elapse.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                                                                           uint64_t                                        timeout,
                                                                           Dispatch const &                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: eTimeout is a valid (non-throwing) outcome, so the Result is
  // returned to the caller rather than converted into a void ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Raw wrapper for vkSignalSemaphoreKHR: sets a timeline semaphore to the value given in
  // *pSignalInfo from the host.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
                                                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: errors are routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  13956. //=== VK_INTEL_performance_query ===
  13957. template <typename Dispatch>
  13958. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
  13959. const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13960. {
  13961. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13962. return static_cast<Result>(
  13963. d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
  13964. }
  13965. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13966. template <typename Dispatch>
  13967. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13968. Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
  13969. {
  13970. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13971. VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
  13972. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
  13973. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13974. }
  13975. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Tears down the Intel performance-query API previously set up via
// initializePerformanceApiINTEL. Pure forwarder with no result to report.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher must have been built against the same Vulkan header as this wrapper.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkUninitializePerformanceApiINTEL( m_device );
}
  13982. template <typename Dispatch>
  13983. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
  13984. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13985. {
  13986. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13987. return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  13988. }
  13989. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13990. template <typename Dispatch>
  13991. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13992. CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  13993. {
  13994. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13995. VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
  13996. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
  13997. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13998. }
  13999. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14000. template <typename Dispatch>
  14001. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
  14002. const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14003. {
  14004. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14005. return static_cast<Result>(
  14006. d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  14007. }
  14008. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14009. template <typename Dispatch>
  14010. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14011. CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  14012. {
  14013. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14014. VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
  14015. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  14016. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14017. }
  14018. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14019. template <typename Dispatch>
  14020. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
  14021. const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14022. {
  14023. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14024. return static_cast<Result>(
  14025. d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  14026. }
  14027. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14028. template <typename Dispatch>
  14029. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14030. CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
  14031. {
  14032. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14033. VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
  14034. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
  14035. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14036. }
  14037. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14038. template <typename Dispatch>
  14039. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14040. Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
  14041. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
  14042. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14043. {
  14044. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14045. return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14046. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
  14047. reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  14048. }
  14049. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14050. template <typename Dispatch>
  14051. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
  14052. Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
  14053. {
  14054. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14055. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  14056. VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14057. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
  14058. reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
  14059. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
  14060. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
  14061. }
  14062. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14063. template <typename Dispatch>
  14064. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
  14065. Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
  14066. Dispatch const & d ) const
  14067. {
  14068. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14069. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  14070. VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14071. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
  14072. reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
  14073. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
  14074. return createResultValueType(
  14075. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14076. UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) );
  14077. }
  14078. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  14079. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14080. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14081. template <typename Dispatch>
  14082. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14083. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14084. {
  14085. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14086. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14087. }
  14088. #else
  14089. template <typename Dispatch>
  14090. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14091. Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14092. {
  14093. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14094. VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14095. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
  14096. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14097. }
  14098. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14099. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14100. template <typename Dispatch>
  14101. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14102. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14103. {
  14104. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14105. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14106. }
  14107. #else
  14108. template <typename Dispatch>
  14109. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14110. Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14111. {
  14112. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14113. VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14114. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
  14115. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14116. }
  14117. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14118. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14119. template <typename Dispatch>
  14120. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14121. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14122. {
  14123. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14124. return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14125. }
  14126. #else
  14127. template <typename Dispatch>
  14128. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14129. Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14130. {
  14131. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14132. VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14133. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
  14134. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14135. }
  14136. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14137. template <typename Dispatch>
  14138. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
  14139. VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
  14140. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14141. {
  14142. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14143. return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
  14144. m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
  14145. }
  14146. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14147. template <typename Dispatch>
  14148. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
  14149. Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
  14150. {
  14151. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14152. VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
  14153. VkResult result = d.vkGetPerformanceParameterINTEL(
  14154. m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
  14155. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
  14156. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  14157. }
  14158. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14159. //=== VK_AMD_display_native_hdr ===
  14160. template <typename Dispatch>
  14161. VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
  14162. VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
  14163. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14164. {
  14165. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14166. d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  14167. }
  14168. #if defined( VK_USE_PLATFORM_FUCHSIA )
  14169. //=== VK_FUCHSIA_imagepipe_surface ===
  14170. template <typename Dispatch>
  14171. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14172. Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
  14173. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  14174. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  14175. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14176. {
  14177. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14178. return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
  14179. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
  14180. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  14181. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  14182. }
  14183. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14184. template <typename Dispatch>
  14185. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  14186. Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
  14187. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14188. Dispatch const & d ) const
  14189. {
  14190. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14191. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14192. VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
  14193. m_instance,
  14194. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
  14195. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14196. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14197. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
  14198. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  14199. }
  14200. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14201. template <typename Dispatch>
  14202. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  14203. Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
  14204. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14205. Dispatch const & d ) const
  14206. {
  14207. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14208. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14209. VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
  14210. m_instance,
  14211. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
  14212. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14213. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14214. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
  14215. return createResultValueType(
  14216. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14217. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  14218. }
  14219. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  14220. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14221. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  14222. #if defined( VK_USE_PLATFORM_METAL_EXT )
  14223. //=== VK_EXT_metal_surface ===
  14224. template <typename Dispatch>
  14225. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
  14226. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  14227. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  14228. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14229. {
  14230. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14231. return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
  14232. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
  14233. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  14234. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  14235. }
  14236. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14237. template <typename Dispatch>
  14238. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  14239. Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
  14240. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14241. Dispatch const & d ) const
  14242. {
  14243. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14244. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14245. VkResult result =
  14246. d.vkCreateMetalSurfaceEXT( m_instance,
  14247. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
  14248. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14249. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14250. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
  14251. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  14252. }
  14253. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14254. template <typename Dispatch>
  14255. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  14256. Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
  14257. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14258. Dispatch const & d ) const
  14259. {
  14260. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14261. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14262. VkResult result =
  14263. d.vkCreateMetalSurfaceEXT( m_instance,
  14264. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
  14265. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14266. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14267. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
  14268. return createResultValueType(
  14269. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14270. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  14271. }
  14272. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  14273. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14274. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  14275. //=== VK_KHR_fragment_shading_rate ===
  14276. template <typename Dispatch>
  14277. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14278. PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
  14279. VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
  14280. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14281. {
  14282. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14283. return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  14284. m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  14285. }
  14286. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14287. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
  14288. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  14289. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
  14290. PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  14291. {
  14292. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14293. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
  14294. uint32_t fragmentShadingRateCount;
  14295. VkResult result;
  14296. do
  14297. {
  14298. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
  14299. if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
  14300. {
  14301. fragmentShadingRates.resize( fragmentShadingRateCount );
  14302. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  14303. m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
  14304. }
  14305. } while ( result == VK_INCOMPLETE );
  14306. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  14307. VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
  14308. if ( fragmentShadingRateCount < fragmentShadingRates.size() )
  14309. {
  14310. fragmentShadingRates.resize( fragmentShadingRateCount );
  14311. }
  14312. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
  14313. }
  14314. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
  14315. typename Dispatch,
  14316. typename B1,
  14317. typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
  14318. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  14319. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
  14320. PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
  14321. Dispatch const & d ) const
  14322. {
  14323. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14324. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
  14325. physicalDeviceFragmentShadingRateKHRAllocator );
  14326. uint32_t fragmentShadingRateCount;
  14327. VkResult result;
  14328. do
  14329. {
  14330. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
  14331. if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
  14332. {
  14333. fragmentShadingRates.resize( fragmentShadingRateCount );
  14334. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  14335. m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
  14336. }
  14337. } while ( result == VK_INCOMPLETE );
  14338. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  14339. VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
  14340. if ( fragmentShadingRateCount < fragmentShadingRates.size() )
  14341. {
  14342. fragmentShadingRates.resize( fragmentShadingRateCount );
  14343. }
  14344. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
  14345. }
  14346. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14347. template <typename Dispatch>
  14348. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
  14349. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
  14350. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14351. {
  14352. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14353. d.vkCmdSetFragmentShadingRateKHR(
  14354. m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  14355. }
  14356. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14357. template <typename Dispatch>
  14358. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
  14359. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
  14360. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14361. {
  14362. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14363. d.vkCmdSetFragmentShadingRateKHR(
  14364. m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  14365. }
  14366. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14367. //=== VK_EXT_buffer_device_address ===
  14368. template <typename Dispatch>
  14369. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  14370. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14371. {
  14372. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14373. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  14374. }
  14375. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14376. template <typename Dispatch>
  14377. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  14378. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14379. {
  14380. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14381. VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  14382. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  14383. }
  14384. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_EXT_tooling_info ===

  // Pointer-style overload: forwards the count/properties pointers straight to
  // vkGetPhysicalDeviceToolPropertiesEXT and returns its result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t *                                           pToolCount,
                                                                                      VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: classic Vulkan two-call enumeration. The first call queries
  // the element count, the second fills the vector; the loop repeats while the
  // driver reports VK_INCOMPLETE (the count may change between the two calls).
  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
    uint32_t toolCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
      if ( ( result == VK_SUCCESS ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result =
          d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
    // The final call may report fewer elements than were allocated; shrink to fit.
    if ( toolCount < toolProperties.size() )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  }

  // Same enumeration as above, but the result vector is constructed with a
  // caller-supplied allocator. B1 is SFINAE-constrained so this overload only
  // participates when the allocator's value_type matches the element type.
  template <typename PhysicalDeviceToolPropertiesAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
    PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
      physicalDeviceToolPropertiesAllocator );
    uint32_t toolCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
      if ( ( result == VK_SUCCESS ) && toolCount )
      {
        toolProperties.resize( toolCount );
        result =
          d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
    // Shrink if the driver returned fewer tools than the first query indicated.
    if ( toolCount < toolProperties.size() )
    {
      toolProperties.resize( toolCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_present_wait ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Plain overload: returns the raw Result; the caller is responsible for
  // distinguishing success, timeout, and error codes itself.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                           uint64_t                           presentId,
                                                                           uint64_t                           timeout,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
  }
#else
  // Enhanced overload: resultCheck is passed { eSuccess, eTimeout, eSuboptimalKHR }
  // as the accepted non-error set, and the raw result is then returned so the
  // caller can still distinguish those three outcomes.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_NV_cooperative_matrix ===

  // Pointer-style overload: direct pass-through to
  // vkGetPhysicalDeviceCooperativeMatrixPropertiesNV; result is returned unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
    uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
      m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumeration (count, then fill), retried while the
  // driver returns VK_INCOMPLETE; returns the populated vector.
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink if the final call wrote fewer elements than were allocated.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }

  // Allocator-taking variant; SFINAE on B1::value_type restricts it to matching allocators.
  template <typename CooperativeMatrixPropertiesNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
    PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
                                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
      cooperativeMatrixPropertiesNVAllocator );
    uint32_t propertyCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_NV_coverage_reduction_mode ===

  // Pointer-style overload: direct pass-through to
  // vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
    uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
      m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumeration retried on VK_INCOMPLETE.
  template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
    uint32_t combinationCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
    // Shrink if the final call wrote fewer elements than were allocated.
    if ( combinationCount < combinations.size() )
    {
      combinations.resize( combinationCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  }

  // Allocator-taking variant; SFINAE on B1::value_type restricts it to matching allocators.
  template <typename FramebufferMixedSamplesCombinationNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
    PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
      FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
      framebufferMixedSamplesCombinationNVAllocator );
    uint32_t combinationCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && combinationCount )
      {
        combinations.resize( combinationCount );
        result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
          m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
    VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
    if ( combinationCount < combinations.size() )
    {
      combinations.resize( combinationCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_EXT_full_screen_exclusive ===

  // Pointer-style overload: pass-through to vkGetPhysicalDeviceSurfacePresentModes2EXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                uint32_t *                                                  pPresentModeCount,
                                                VULKAN_HPP_NAMESPACE::PresentModeKHR *                      pPresentModes,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                                              reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                              pPresentModeCount,
                                                                              reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: two-call enumeration of present modes for the given
  // surface info, retried while the driver returns VK_INCOMPLETE.
  template <typename PresentModeKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                               reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                               &presentModeCount,
                                                               reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    // Shrink if the final call wrote fewer elements than were allocated.
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }

  // Allocator-taking variant; SFINAE on B1::value_type restricts it to matching allocators.
  template <typename PresentModeKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
    PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                                PresentModeKHRAllocator &                                   presentModeKHRAllocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
    uint32_t presentModeCount;
    VkResult result;
    do
    {
      result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentModeCount )
      {
        presentModes.resize( presentModeCount );
        result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
                                                               reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                               &presentModeCount,
                                                               reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
    VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
    if ( presentModeCount < presentModes.size() )
    {
      presentModes.resize( presentModeCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Plain overload: returns the raw Result of vkAcquireFullScreenExclusiveModeEXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  // Enhanced overload: result is routed through resultCheck; the void-typed
  // ResultValueType carries only success/failure.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Plain overload: returns the raw Result of vkReleaseFullScreenExclusiveModeEXT.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
# else
  // Enhanced overload: mirrors acquireFullScreenExclusiveModeEXT above for release.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Pointer-style overload: pass-through to vkGetDeviceGroupSurfacePresentModes2EXT;
  // the supported device-group present modes are written to *pModes.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                             VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR *      pModes,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: queries the mode flags into a local, checks the result,
  // and returns the flags wrapped in ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
    Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
    VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
      m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  //=== VK_EXT_headless_surface ===

  // Pointer-style overload: pass-through to vkCreateHeadlessSurfaceEXT; the new
  // surface handle is written to *pSurface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
                                                              reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: optional allocation callbacks; the created SurfaceKHR is
  // returned by value after the result check.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: same creation path, but the surface is wrapped in a
  // UniqueHandle whose ObjectDestroy deleter destroys it via this Instance
  // with the same allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateHeadlessSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_KHR_buffer_device_address ===

  // Pointer-style overload: returns the device address for the buffer described
  // by *pInfo (no VkResult from the underlying entry point).
  template <typename Dispatch>
  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of the above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Pointer-style overload: returns the 64-bit opaque capture address for a buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of the above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
    return result;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Pointer-style overload: returns the 64-bit opaque capture address for a
  // device memory object.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-taking variant of the above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
    return result;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_EXT_line_rasterization ===

  // Records vkCmdSetLineStippleEXT with the given stipple factor and 16-bit pattern.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  }

  //=== VK_EXT_host_query_reset ===

  // Host-side reset of queryCount queries starting at firstQuery in queryPool.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                    uint32_t                        firstQuery,
                                                    uint32_t                        queryCount,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  }
  //=== VK_EXT_extended_dynamic_state ===

  // Records vkCmdSetCullModeEXT (dynamic cull-mode state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  }

  // Records vkCmdSetFrontFaceEXT (dynamic front-face state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  }

  // Records vkCmdSetPrimitiveTopologyEXT (dynamic primitive-topology state).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  }

  // Pointer-style overload: records viewportCount viewports from pViewports.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t                               viewportCount,
                                                                 const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: count and data are taken from the proxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/

  // Pointer-style overload: records scissorCount scissor rectangles from pScissors.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: count and data are taken from the proxy.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14937. template <typename Dispatch>
  14938. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
  14939. uint32_t bindingCount,
  14940. const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
  14941. const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
  14942. const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
  14943. const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
  14944. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14945. {
  14946. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14947. d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
  14948. firstBinding,
  14949. bindingCount,
  14950. reinterpret_cast<const VkBuffer *>( pBuffers ),
  14951. reinterpret_cast<const VkDeviceSize *>( pOffsets ),
  14952. reinterpret_cast<const VkDeviceSize *>( pSizes ),
  14953. reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  14954. }
  14955. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                               ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                               ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                               ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                               ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    // ArrayProxy overload. offsets must match buffers element-for-element; sizes and
    // strides are optional (an empty proxy means "not provided"; presumably an empty
    // ArrayProxy yields a null data() pointer for the C call — TODO confirm).
    // Throws LogicError (or asserts under VULKAN_HPP_NO_EXCEPTIONS) on mismatched sizes.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    // Without exceptions, the consistency checks degrade to debug-only assertions.
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
    VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
    VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
#  else
    // Check order is observable: offsets first, then sizes, then strides.
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
    }
    if ( !sizes.empty() && buffers.size() != sizes.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
    }
    if ( !strides.empty() && buffers.size() != strides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                  firstBinding,
                                  buffers.size(),
                                  reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  }
  14991. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14992. template <typename Dispatch>
  14993. VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14994. {
  14995. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14996. d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  14997. }
  14998. template <typename Dispatch>
  14999. VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15000. {
  15001. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15002. d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  15003. }
  15004. template <typename Dispatch>
  15005. VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15006. {
  15007. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15008. d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  15009. }
  15010. template <typename Dispatch>
  15011. VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
  15012. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15013. {
  15014. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15015. d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  15016. }
  15017. template <typename Dispatch>
  15018. VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15019. {
  15020. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15021. d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  15022. }
  15023. template <typename Dispatch>
  15024. VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
  15025. VULKAN_HPP_NAMESPACE::StencilOp failOp,
  15026. VULKAN_HPP_NAMESPACE::StencilOp passOp,
  15027. VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
  15028. VULKAN_HPP_NAMESPACE::CompareOp compareOp,
  15029. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15030. {
  15031. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15032. d.vkCmdSetStencilOpEXT( m_commandBuffer,
  15033. static_cast<VkStencilFaceFlags>( faceMask ),
  15034. static_cast<VkStencilOp>( failOp ),
  15035. static_cast<VkStencilOp>( passOp ),
  15036. static_cast<VkStencilOp>( depthFailOp ),
  15037. static_cast<VkCompareOp>( compareOp ) );
  15038. }
  15039. //=== VK_KHR_deferred_host_operations ===
  15040. template <typename Dispatch>
  15041. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  15042. VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
  15043. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15044. {
  15045. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15046. return static_cast<Result>( d.vkCreateDeferredOperationKHR(
  15047. m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  15048. }
  15049. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15050. template <typename Dispatch>
  15051. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
  15052. Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  15053. {
  15054. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15055. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  15056. VkResult result = d.vkCreateDeferredOperationKHR(
  15057. m_device,
  15058. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  15059. reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
  15060. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
  15061. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
  15062. }
  15063. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  15064. template <typename Dispatch>
  15065. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
  15066. Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  15067. {
  15068. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15069. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  15070. VkResult result = d.vkCreateDeferredOperationKHR(
  15071. m_device,
  15072. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  15073. reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
  15074. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
  15075. return createResultValueType(
  15076. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15077. UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  15078. }
  15079. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  15080. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15081. template <typename Dispatch>
  15082. VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15083. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  15084. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15085. {
  15086. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15087. d.vkDestroyDeferredOperationKHR(
  15088. m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  15089. }
  15090. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15091. template <typename Dispatch>
  15092. VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15093. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  15094. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15095. {
  15096. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15097. d.vkDestroyDeferredOperationKHR(
  15098. m_device,
  15099. static_cast<VkDeferredOperationKHR>( operation ),
  15100. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  15101. }
  15102. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15103. template <typename Dispatch>
  15104. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15105. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  15106. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15107. {
  15108. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15109. d.vkDestroyDeferredOperationKHR(
  15110. m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  15111. }
  15112. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15113. template <typename Dispatch>
  15114. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15115. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  15116. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15117. {
  15118. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15119. d.vkDestroyDeferredOperationKHR(
  15120. m_device,
  15121. static_cast<VkDeferredOperationKHR>( operation ),
  15122. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  15123. }
  15124. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15125. template <typename Dispatch>
  15126. VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15127. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15128. {
  15129. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15130. return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  15131. }
  15132. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15133. template <typename Dispatch>
  15134. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15135. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15136. {
  15137. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15138. return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  15139. }
  15140. #else
  15141. template <typename Dispatch>
  15142. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  15143. Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15144. {
  15145. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15146. VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  15147. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  15148. }
  15149. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15150. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15151. template <typename Dispatch>
  15152. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15153. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15154. {
  15155. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15156. return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  15157. }
  15158. #else
  15159. template <typename Dispatch>
  15160. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
  15161. Dispatch const & d ) const
  15162. {
  15163. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15164. VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  15165. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15166. VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
  15167. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
  15168. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  15169. }
  15170. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15171. //=== VK_KHR_pipeline_executable_properties ===
  15172. template <typename Dispatch>
  15173. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
  15174. uint32_t * pExecutableCount,
  15175. VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
  15176. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15177. {
  15178. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15179. return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
  15180. reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
  15181. pExecutableCount,
  15182. reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  15183. }
  15184. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  {
    // Enhanced-mode enumeration: standard Vulkan two-call pattern — first query the
    // count (data pointer == nullptr), then fill the vector; loop while the driver
    // reports VK_INCOMPLETE (the count changed between the two calls).
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
    uint32_t executableCount;
    VkResult result;
    do
    {
      // First call: nullptr data pointer -> only executableCount is written.
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
      if ( ( result == VK_SUCCESS ) && executableCount )
      {
        properties.resize( executableCount );
        // Second call: fills the now correctly sized vector.
        result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                         reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                         &executableCount,
                                                         reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    // Trim if the driver returned fewer elements than were allocated for.
    if ( executableCount < properties.size() )
    {
      properties.resize( executableCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  template <typename PipelineExecutablePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
    Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
                                                PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
                                                Dispatch const & d ) const
  {
    // Same two-call enumeration as the overload above, but the result vector is
    // constructed with a caller-supplied allocator.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
      pipelineExecutablePropertiesKHRAllocator );
    uint32_t executableCount;
    VkResult result;
    do
    {
      // Count query, then fill; retry on VK_INCOMPLETE.
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
      if ( ( result == VK_SUCCESS ) && executableCount )
      {
        properties.resize( executableCount );
        result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                         reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                         &executableCount,
                                                         reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
    VULKAN_HPP_ASSERT( executableCount <= properties.size() );
    // Trim if the driver returned fewer elements than were allocated for.
    if ( executableCount < properties.size() )
    {
      properties.resize( executableCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  15249. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15250. template <typename Dispatch>
  15251. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  15252. Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
  15253. uint32_t * pStatisticCount,
  15254. VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
  15255. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15256. {
  15257. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15258. return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
  15259. reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
  15260. pStatisticCount,
  15261. reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  15262. }
  15263. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    // Enhanced-mode enumeration: two-call pattern (count query, then fill),
    // retried while the driver reports VK_INCOMPLETE.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
    uint32_t statisticCount;
    VkResult result;
    do
    {
      // Count query: nullptr data pointer -> only statisticCount is written.
      result =
        d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
      if ( ( result == VK_SUCCESS ) && statisticCount )
      {
        statistics.resize( statisticCount );
        // Fill the correctly sized vector.
        result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                         reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                         &statisticCount,
                                                         reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    // Trim if fewer elements were returned than allocated.
    if ( statisticCount < statistics.size() )
    {
      statistics.resize( statisticCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
  }
  template <typename PipelineExecutableStatisticKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
    Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
                                                PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
                                                Dispatch const & d ) const
  {
    // Same two-call enumeration as the overload above, with a caller-supplied
    // allocator for the result vector.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
      pipelineExecutableStatisticKHRAllocator );
    uint32_t statisticCount;
    VkResult result;
    do
    {
      // Count query, then fill; retry on VK_INCOMPLETE.
      result =
        d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
      if ( ( result == VK_SUCCESS ) && statisticCount )
      {
        statistics.resize( statisticCount );
        result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                         reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                         &statisticCount,
                                                         reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
    VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
    // Trim if fewer elements were returned than allocated.
    if ( statisticCount < statistics.size() )
    {
      statistics.resize( statisticCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
  }
  15330. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15331. template <typename Dispatch>
  15332. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  15333. Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
  15334. uint32_t * pInternalRepresentationCount,
  15335. VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
  15336. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15337. {
  15338. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15339. return static_cast<Result>(
  15340. d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
  15341. reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
  15342. pInternalRepresentationCount,
  15343. reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  15344. }
  15345. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  {
    // Enhanced-mode enumeration: two-call pattern (count query, then fill),
    // retried while the driver reports VK_INCOMPLETE.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations;
    uint32_t internalRepresentationCount;
    VkResult result;
    do
    {
      // Count query: nullptr data pointer -> only the count is written.
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        // Fill the correctly sized vector.
        result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    // Trim if fewer elements were returned than allocated.
    if ( internalRepresentationCount < internalRepresentations.size() )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
  }
  template <typename PipelineExecutableInternalRepresentationKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
    Device::getPipelineExecutableInternalRepresentationsKHR(
      const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
      PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
      Dispatch const & d ) const
  {
    // Same two-call enumeration as the overload above, with a caller-supplied
    // allocator for the result vector.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
      internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
    uint32_t internalRepresentationCount;
    VkResult result;
    do
    {
      // Count query, then fill; retry on VK_INCOMPLETE.
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
      if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
      {
        internalRepresentations.resize( internalRepresentationCount );
        result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
          m_device,
          reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
          &internalRepresentationCount,
          reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
    VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
    // Trim if fewer elements were returned than allocated.
    if ( internalRepresentationCount < internalRepresentations.size() )
    {
      internalRepresentations.resize( internalRepresentationCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
  }
  15416. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15417. //=== VK_NV_device_generated_commands ===
  15418. template <typename Dispatch>
  15419. VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
  15420. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  15421. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15422. {
  15423. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15424. d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
  15425. reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
  15426. reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  15427. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value instead of via out-parameter.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  // StructureChain overload: fills MemoryRequirements2 inside the caller-specified chain, so
  // extension structures linked via pNext are populated as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                  structureChain;
    // The chain owns the storage; we hand the C API a pointer into it.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                  reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                  reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a preprocessing step for device-generated commands into this command buffer (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of preprocessGeneratedCommandsNV; forwards the address of the info struct to the C API.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records execution of device-generated commands; isPreprocessed tells the driver whether
  // preprocessGeneratedCommandsNV already ran on the same info (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
                                                                    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdExecuteGeneratedCommandsNV(
      m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of executeGeneratedCommandsNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
                                                                    const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdExecuteGeneratedCommandsNV(
      m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Binds shader group groupIndex of the given pipeline to the given bind point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                                   VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                                   uint32_t                                groupIndex,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
  }
  // Creates an indirect-commands layout (C-pointer overload). The raw VkResult is returned
  // unchecked; the caller is responsible for error handling.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
                                            VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *                 pIndirectCommandsLayout,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
                                                                    reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                    reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created handle; resultCheck turns failure VkResults into
  // exceptions (or the configured no-exception handling).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
    Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                            Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    VkResult                                       result = d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      // Optional<> converts to nullptr when no allocator was supplied.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the returned UniqueHandle destroys the layout (with the same
  // allocator and dispatcher) when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
    Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                  Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
    VkResult                                       result = d.vkCreateIndirectCommandsLayoutNV(
      m_device,
      reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
                                    indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys an indirect-commands layout (C-pointer allocator overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking an Optional allocator (nullptr when omitted).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV            indirectCommandsLayout,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for IndirectCommandsLayoutNV; same effect as
  // destroyIndirectCommandsLayoutNV (C-pointer allocator overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for IndirectCommandsLayoutNV with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV            indirectCommandsLayout,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyIndirectCommandsLayoutNV(
      m_device,
      static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_EXT_acquire_drm_display ===

#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Acquires exclusive access to a DRM display via the DRM file descriptor drmFd.
  // Non-enhanced build: returns the raw Result for the caller to check.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t                          drmFd,
                                                                                      VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
  }
#else
  // Enhanced build: failures are routed through resultCheck (exception or configured handling).
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type
    PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Looks up the DisplayKHR for a DRM connector (C-pointer overload, unchecked Result).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t                            drmFd,
                                                                                  uint32_t                           connectorId,
                                                                                  VULKAN_HPP_NAMESPACE::DisplayKHR * display,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the DisplayKHR handle; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult                         result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the handle is released (ObjectRelease, not destroyed) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult                         result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_EXT_private_data ===

  // Creates a private-data slot (C-pointer overload, unchecked Result).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
                                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                                                  VULKAN_HPP_NAMESPACE::PrivateDataSlot *                 pPrivateDataSlot,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
                                                              reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created PrivateDataSlot; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
    Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &   createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    VkResult                              result = d.vkCreatePrivateDataSlotEXT(
      m_device,
      reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
      // Optional<> converts to nullptr when no allocator was supplied.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the slot is destroyed (with the same allocator/dispatcher) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
    Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &   createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
    VkResult                              result = d.vkCreatePrivateDataSlotEXT(
      m_device,
      reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif   /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Destroys a private-data slot (C-pointer allocator overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot             privateDataSlot,
                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload taking an Optional allocator (nullptr when omitted).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot                     privateDataSlot,
                                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyPrivateDataSlotEXT(
      m_device,
      static_cast<VkPrivateDataSlot>( privateDataSlot ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Associates 64-bit user data with an object (identified by objectType + objectHandle) in the
  // given private-data slot. Non-enhanced build: returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType,
                                                                           uint64_t                              objectHandle,
                                                                           VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                           uint64_t                              data,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
  }
#else
  // Enhanced build: failures are routed through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType,
                                                                                    uint64_t                              objectHandle,
                                                                                    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                                    uint64_t                              data,
                                                                                    Dispatch const &                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result =
      d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Reads the 64-bit user data stored for an object in the given slot (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType,
                                                    uint64_t                              objectHandle,
                                                    VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                    uint64_t *                            pData,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the stored value directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType      objectType,
                                                                             uint64_t                              objectHandle,
                                                                             VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t data;
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
    return data;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_ENABLE_BETA_EXTENSIONS )
  //=== VK_KHR_video_encode_queue ===

  // Records a video-encode operation into this command buffer (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of encodeVideoKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_ENABLE_BETA_EXTENSIONS*/
#if defined( VK_USE_PLATFORM_METAL_EXT )
  //=== VK_EXT_metal_objects ===

  // Exports Metal object handles; which objects are exported is driven by the structures
  // chained into *pMetalObjectsInfo, which the driver fills in place (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
  }

#  ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the filled ExportMetalObjectsInfoEXT by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
    Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
    return metalObjectsInfo;
  }

  // StructureChain overload: fills ExportMetalObjectsInfoEXT plus any chained extension structs.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                        structureChain;
    VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
    d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
    return structureChain;
  }
#  endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif   /*VK_USE_PLATFORM_METAL_EXT*/
  //=== VK_KHR_synchronization2 ===

  // Signals an event with the synchronization2 dependency info (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of setEvent2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event                  event,
                                                      const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Unsignals an event; stageMask is the synchronization2 stage mask for the reset.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event                 event,
                                                        VULKAN_HPP_NAMESPACE::PipelineStageFlags2   stageMask,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  }
  // Waits on eventCount events; pDependencyInfos supplies one DependencyInfo per event
  // (C-pointer overload — the caller guarantees matching array lengths).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t                                     eventCount,
                                                        const VULKAN_HPP_NAMESPACE::Event *          pEvents,
                                                        const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWaitEvents2KHR(
      m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload of waitEvents2KHR. Validates that both proxies have the same length:
  // asserts in no-exception builds, throws LogicError otherwise.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const &          events,
                                                        ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
#  else
    if ( events.size() != dependencyInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdWaitEvents2KHR( m_commandBuffer,
                           events.size(),
                           reinterpret_cast<const VkEvent *>( events.data() ),
                           reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a synchronization2 pipeline barrier (C-pointer overload).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reference overload of pipelineBarrier2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Writes a timestamp at the given synchronization2 stage into queryPool slot `query`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
                                                            VULKAN_HPP_NAMESPACE::QueryPool           queryPool,
                                                            uint32_t                                  query,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  }
  // Submits work with SubmitInfo2 structures, optionally signaling `fence`
  // (C-pointer overload, unchecked Result).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t                                  submitCount,
                                                                   const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
                                                                   VULKAN_HPP_NAMESPACE::Fence               fence,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload of submit2KHR; failures go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Queue::submit2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Writes the 32-bit `marker` into dstBuffer at dstOffset when execution reaches `stage`.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
                                                               VULKAN_HPP_NAMESPACE::Buffer              dstBuffer,
                                                               VULKAN_HPP_NAMESPACE::DeviceSize          dstOffset,
                                                               uint32_t                                  marker,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteBufferMarker2AMD(
      m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  }
  // C-style overload: forwards the count/data pointer pair to vkGetQueueCheckpointData2NV.
  // Follows the standard Vulkan two-call convention: pass a null pCheckpointData to query the
  // count, then call again with a buffer of that size.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t *                                pCheckpointDataCount,
                                                      VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
  }
  15944. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15945. template <typename CheckpointData2NVAllocator, typename Dispatch>
  15946. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
  15947. Queue::getCheckpointData2NV( Dispatch const & d ) const
  15948. {
  15949. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15950. std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
  15951. uint32_t checkpointDataCount;
  15952. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
  15953. checkpointData.resize( checkpointDataCount );
  15954. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
  15955. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  15956. if ( checkpointDataCount < checkpointData.size() )
  15957. {
  15958. checkpointData.resize( checkpointDataCount );
  15959. }
  15960. return checkpointData;
  15961. }
  15962. template <typename CheckpointData2NVAllocator,
  15963. typename Dispatch,
  15964. typename B1,
  15965. typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type>
  15966. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
  15967. Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
  15968. {
  15969. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15970. std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
  15971. uint32_t checkpointDataCount;
  15972. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
  15973. checkpointData.resize( checkpointDataCount );
  15974. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
  15975. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  15976. if ( checkpointDataCount < checkpointData.size() )
  15977. {
  15978. checkpointData.resize( checkpointDataCount );
  15979. }
  15980. return checkpointData;
  15981. }
  15982. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_NV_fragment_shading_rate_enums ===
  // Records vkCmdSetFragmentShadingRateEnumNV; 'combinerOps' is a fixed two-element array of
  // combiner operations reinterpreted to its C counterpart for the dispatch call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV                  shadingRate,
                                                                      const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetFragmentShadingRateEnumNV(
      m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  }
  //=== VK_EXT_mesh_shader ===
  // Records vkCmdDrawMeshTasksEXT with the given 3D workgroup counts; a pure pass-through
  // (all parameters are plain uint32_t, so no casting is needed).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }
  // Records vkCmdDrawMeshTasksIndirectEXT: draw parameters come from 'buffer' at 'offset',
  // with 'drawCount' commands spaced 'stride' bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                  VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                  uint32_t                         drawCount,
                                                                  uint32_t                         stride,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }
  // Records vkCmdDrawMeshTasksIndirectCountEXT: like the indirect draw above, but the actual
  // draw count is read on the GPU from 'countBuffer' at 'countBufferOffset', clamped by
  // 'maxDrawCount'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer     buffer,
                                                                       VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                       VULKAN_HPP_NAMESPACE::Buffer     countBuffer,
                                                                       VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                       uint32_t                         maxDrawCount,
                                                                       uint32_t                         stride,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
                                          static_cast<VkBuffer>( buffer ),
                                          static_cast<VkDeviceSize>( offset ),
                                          static_cast<VkBuffer>( countBuffer ),
                                          static_cast<VkDeviceSize>( countBufferOffset ),
                                          maxDrawCount,
                                          stride );
  }
  //=== VK_KHR_copy_commands2 ===
  // C-style overload: records vkCmdCopyBuffer2KHR from a CopyBufferInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: records vkCmdCopyImage2KHR from a CopyImageInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: records vkCmdCopyBufferToImage2KHR from a CopyBufferToImageInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: records vkCmdCopyImageToBuffer2KHR from a CopyImageToBufferInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: records vkCmdBlitImage2KHR from a BlitImageInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: records vkCmdResolveImage2KHR from a ResolveImageInfo2 pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, taking the info struct by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  //=== VK_EXT_image_compression_control ===
  // C-style overload: queries an image subresource's layout via vkGetImageSubresourceLayout2EXT,
  // writing the result through 'pLayout'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image                        image,
                                                                const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT * pSubresource,
                                                                VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT *      pLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2EXT *>( pSubresource ),
                                       reinterpret_cast<VkSubresourceLayout2EXT *>( pLayout ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the SubresourceLayout2EXT by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT Device::getImageSubresourceLayout2EXT(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT layout;
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
    return layout;
  }
  // StructureChain overload: fills the SubresourceLayout2EXT element of a caller-specified
  // structure chain, so any extension structs the caller chained in get queried as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2EXT & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                    structureChain;
    VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT>();
    d.vkGetImageSubresourceLayout2EXT( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2EXT *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2EXT *>( &layout ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_NV_acquire_winrt_display ===
# ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // C-style variant (enhanced mode disabled): acquires exclusive control of a WinRT display,
  // returning the raw result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  }
# else
  // Enhanced variant: routes the result through resultCheck and returns void on success.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: looks up the DisplayKHR handle for 'deviceRelativeId' via
  // vkGetWinrtDisplayNV, writing it through 'pDisplay'.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t                           deviceRelativeId,
                                                                                   VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the DisplayKHR handle, reporting failures via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
    PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the returned display in a UniqueHandle whose deleter
  // releases it through this PhysicalDevice (ObjectRelease).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
    PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayKHR display;
    VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  //=== VK_EXT_directfb_surface ===
  // C-style overload: creates a DirectFB presentation surface via vkCreateDirectFBSurfaceEXT,
  // writing the new SurfaceKHR through 'pSurface'; returns the raw result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceKHR *                         pSurface,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
                                                              reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
                                                              reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                              reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: optional allocation callbacks, result routed through resultCheck,
  // returns the created SurfaceKHR by value on success.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                        Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateDirectFBSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
      // Optional<> yields a null pointer when no allocator was supplied.
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same creation path, but wraps the surface in a UniqueHandle whose
  // deleter destroys it through this Instance with the same allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                              Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result = d.vkCreateDirectFBSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: queries whether the queue family can present to the given DirectFB
  // instance (pointer form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t    queueFamilyIndex,
                                                                              IDirectFB * dfb,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same query, taking the IDirectFB by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  //=== VK_KHR_ray_tracing_pipeline ===
  // C-style overload: records vkCmdTraceRaysKHR with the four shader-binding-table regions
  // (raygen / miss / hit / callable) and the 3D trace dimensions.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                                      uint32_t                                                    width,
                                                      uint32_t                                                    height,
                                                      uint32_t                                                    depth,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: identical recording, taking the binding-table regions by const reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                      const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                      uint32_t                                                    width,
                                                      uint32_t                                                    height,
                                                      uint32_t                                                    depth,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysKHR( m_commandBuffer,
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                         reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                         width,
                         height,
                         depth );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-style overload: creates 'createInfoCount' ray-tracing pipelines via
  // vkCreateRayTracingPipelinesKHR (optionally deferred and cache-backed), writing the handles
  // through 'pPipelines' and returning the raw result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                          uint32_t                                                      createInfoCount,
                                          const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
                                          VULKAN_HPP_NAMESPACE::Pipeline *                              pPipelines,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
                                                                  static_cast<VkDeferredOperationKHR>( deferredOperation ),
                                                                  static_cast<VkPipelineCache>( pipelineCache ),
                                                                  createInfoCount,
                                                                  reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates one pipeline per element of 'createInfos' and returns them in a
  // vector. Note the widened success set passed to resultCheck: deferred-operation and
  // pipeline-compile-required codes are treated as acceptable outcomes, and the actual Result
  // is handed back alongside the pipelines in a ResultValue.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                 allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VkResult result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Allocator-taking variant of the enhanced overload: identical behavior, but the result
  // vector is built with the supplied PipelineAllocator instance. The B0/enable_if template
  // machinery restricts this overload to allocators whose value_type is Pipeline.
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
    Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                                deferredOperation,
                                          VULKAN_HPP_NAMESPACE::PipelineCache                                       pipelineCache,
                                          ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                 allocator,
                                          PipelineAllocator &                                                       pipelineAllocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VkResult result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // Deferred-operation and compile-required codes count as success for this entry point.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Single-pipeline convenience overload: creates exactly one pipeline from 'createInfo'
  // (createInfoCount hard-wired to 1) and returns it together with the Result, accepting the
  // same extended success set (deferred / not-deferred / compile-required) as the batch call.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                    deferredOperation,
                                         VULKAN_HPP_NAMESPACE::PipelineCache                           pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult                       result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
  16437. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: creates one pipeline per element of createInfos and wraps each
  // plain handle in a UniqueHandle that destroys it via ObjectDestroy on scope exit.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                            deferredOperation,
                                                VULKAN_HPP_NAMESPACE::PipelineCache                                   pipelineCache,
                                                ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>             allocator,
                                                Dispatch const &                                                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Plain handles first; they are adopted into UniqueHandles only after resultCheck.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult                                    result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // Deferred / not-deferred / compile-required are success codes for this command.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    // A single deleter (device + allocator + dispatcher) is shared by every UniqueHandle.
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Overload taking a caller-supplied allocator for the returned vector of UniqueHandles.
  // The enable_if constraint restricts B0 so this overload only participates when the
  // allocator's value_type matches UniqueHandle<Pipeline, Dispatch>.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                            deferredOperation,
                                                VULKAN_HPP_NAMESPACE::PipelineCache                                   pipelineCache,
                                                ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>             allocator,
                                                PipelineAllocator &                                                   pipelineAllocator,
                                                Dispatch const &                                                      d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult                                    result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // Deferred / not-deferred / compile-required are success codes for this command.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // The result vector is constructed with the caller's allocator instance.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Single-pipeline smart-handle variant: one create-info in, one UniqueHandle out.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR                deferredOperation,
                                               VULKAN_HPP_NAMESPACE::PipelineCache                       pipelineCache,
                                               const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult                       result = d.vkCreateRayTracingPipelinesKHR(
      m_device,
      static_cast<VkDeferredOperationKHR>( deferredOperation ),
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // Deferred / not-deferred / compile-required are success codes for this command.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                   VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
                   VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // The UniqueHandle owns the pipeline; its deleter captures device, allocator and dispatcher.
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  16538. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  16539. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16540. template <typename Dispatch>
  16541. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  16542. uint32_t firstGroup,
  16543. uint32_t groupCount,
  16544. size_t dataSize,
  16545. void * pData,
  16546. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16547. {
  16548. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16549. return static_cast<Result>(
  16550. d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  16551. }
  16552. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns the shader-group handle bytes as a vector of DataType.
  // dataSize must be a multiple of sizeof(DataType) (asserted below).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult                                 result = d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Convenience variant returning a single DataType worth of handle data
  // (dataSize is implicitly sizeof(DataType)).
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  16576. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16577. template <typename Dispatch>
  16578. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  16579. uint32_t firstGroup,
  16580. uint32_t groupCount,
  16581. size_t dataSize,
  16582. void * pData,
  16583. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16584. {
  16585. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16586. return static_cast<Result>(
  16587. d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  16588. }
  16589. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns capture/replay handle bytes as a vector of DataType.
  // dataSize must be a multiple of sizeof(DataType) (asserted below).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
      VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult                                 result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }

  // Convenience variant returning a single DataType worth of capture/replay handle data.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
    VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  16614. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records an indirect ray-tracing dispatch; the trace dimensions are read on the
  // device from indirectDeviceAddress. Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: same command, taking the binding-table regions by reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
                                                              const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
                                                              VULKAN_HPP_NAMESPACE::DeviceAddress                         indirectDeviceAddress,
                                                              Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
                                 reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
                                 static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16649. template <typename Dispatch>
  16650. VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  16651. uint32_t group,
  16652. VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
  16653. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16654. {
  16655. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16656. return static_cast<DeviceSize>(
  16657. d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  16658. }
  // Records the dynamic ray-tracing pipeline stack size into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  }
  16665. //=== VK_EXT_vertex_input_dynamic_state ===
  // VK_EXT_vertex_input_dynamic_state: sets vertex binding/attribute state dynamically.
  // Raw-pointer variant with explicit counts.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t                                                          vertexBindingDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT *   pVertexBindingDescriptions,
                                                           uint32_t                                                          vertexAttributeDescriptionCount,
                                                           const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
                                                           Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetVertexInputEXT( m_commandBuffer,
                              vertexBindingDescriptionCount,
                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
                              vertexAttributeDescriptionCount,
                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: counts are taken from the ArrayProxy sizes.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setVertexInputEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const &   vertexBindingDescriptions,
                                      ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
                                      Dispatch const &                                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetVertexInputEXT( m_commandBuffer,
                              vertexBindingDescriptions.size(),
                              reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
                              vertexAttributeDescriptions.size(),
                              reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16695. #if defined( VK_USE_PLATFORM_FUCHSIA )
  16696. //=== VK_FUCHSIA_external_memory ===
  // VK_FUCHSIA_external_memory: exports a device-memory allocation as a Zircon handle.
  // Raw-pointer variant; the handle is written through pZirconHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                          zx_handle_t *                                                  pZirconHandle,
                                          Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns the Zircon handle by value, throwing (or returning a
  // ResultValue, depending on the exception configuration) on failure.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    zx_handle_t zirconHandle;
    VkResult    result =
      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Queries the memory properties of an externally-created Zircon handle.
  // Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits      handleType,
                                                    zx_handle_t                                                 zirconHandle,
                                                    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
                                                    Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                  zirconHandle,
                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns the properties struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                    zx_handle_t                                            zirconHandle,
                                                    Dispatch const &                                       d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
    VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
                                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                  zirconHandle,
                                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16751. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  16752. #if defined( VK_USE_PLATFORM_FUCHSIA )
  16753. //=== VK_FUCHSIA_external_semaphore ===
  // VK_FUCHSIA_external_semaphore: imports a Zircon handle into a semaphore.
  // Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: void result, throws on failure (per exception configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
                                                Dispatch const &                                                     d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Exports a semaphore payload as a Zircon handle. Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
                                             zx_handle_t *                                                     pZirconHandle,
                                             Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns the Zircon handle by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    zx_handle_t zirconHandle;
    VkResult    result =
      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16798. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  16799. #if defined( VK_USE_PLATFORM_FUCHSIA )
  16800. //=== VK_FUCHSIA_buffer_collection ===
  // VK_FUCHSIA_buffer_collection: creates a buffer-collection object.
  // Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
                                           VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA *                 pCollection,
                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
                                                                   reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                   reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: returns the created handle by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                           Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VkResult                                      result = d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: the returned UniqueHandle destroys the collection on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
    Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                                 Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VkResult                                      result = d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Applies image-usage constraints to a buffer collection. Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
                                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: void result, throws on failure (per exception configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA             collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
                                                        Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Applies buffer-usage constraints to a buffer collection. Raw-pointer variant.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA              collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
                                                         Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced variant: void result, throws on failure (per exception configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA              collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
                                                         Dispatch const &                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: destroys a FUCHSIA buffer collection, using the given
  // allocation callbacks (may be null).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the Optional<> wrapper converts to a null pointer when no
  // allocator is supplied.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Generic destroy() overload for BufferCollectionFUCHSIA handles; identical behavior
  // to destroyBufferCollectionFUCHSIA (C-pointer allocator form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for BufferCollectionFUCHSIA handles with an Optional
  // allocator (null callbacks when omitted).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyBufferCollectionFUCHSIA(
      m_device,
      static_cast<VkBufferCollectionFUCHSIA>( collection ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: queries properties of a FUCHSIA buffer collection into the
  // caller-provided structure and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                  VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
  }
  16954. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16955. template <typename Dispatch>
  16956. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
  16957. Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
  16958. {
  16959. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16960. VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
  16961. VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
  16962. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
  16963. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
  16964. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  16965. }
  16966. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16967. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  16968. //=== VK_HUAWEI_subpass_shading ===
  // C-pointer overload: queries the maximal workgroup size for subpass shading on the
  // given render pass (VK_HUAWEI_subpass_shading).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
                                                                                                 VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
                                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns ResultValue (not ResultValueType) because both
  // eSuccess and eIncomplete are accepted as non-throwing results here.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
    VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records a subpass-shading dispatch into this command buffer (no parameters beyond
  // the dispatcher).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
  }
  16999. //=== VK_HUAWEI_invocation_mask ===
  // Binds an invocation-mask image view (VK_HUAWEI_invocation_mask) in the given layout.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
                                                                  VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  }
  17008. //=== VK_NV_external_memory_rdma ===
  // C-pointer overload: obtains an RDMA-capable remote address for device memory
  // (VK_NV_external_memory_rdma), writing it into *pAddress.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
                                      VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
  }
  17019. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17020. template <typename Dispatch>
  17021. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
  17022. Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
  17023. {
  17024. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17025. VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
  17026. VkResult result = d.vkGetMemoryRemoteAddressNV(
  17027. m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
  17028. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
  17029. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
  17030. }
  17031. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17032. //=== VK_EXT_pipeline_properties ===
  // C-pointer overload: queries pipeline properties (VK_EXT_pipeline_properties) into a
  // caller-provided BaseOutStructure chain head.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
                                                                                  VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the BaseOutStructure by value after checking the
  // VkResult. NOTE(review): only the bare header is filled; callers needing an extended
  // chain should use the pointer overload — confirm against the extension spec.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
    Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
    VkResult result = d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17055. //=== VK_EXT_extended_dynamic_state2 ===
  // Dynamic state (VK_EXT_extended_dynamic_state2): sets the tessellation patch
  // control-point count.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  }
  // Dynamic state: toggles rasterizer discard.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  }
  // Dynamic state: toggles depth bias.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  }
  // Dynamic state: selects the framebuffer logic op.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
  }
  // Dynamic state: toggles primitive restart for indexed draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  }
  17088. #if defined( VK_USE_PLATFORM_SCREEN_QNX )
  17089. //=== VK_QNX_screen_surface ===
  // C-pointer overload: creates a QNX Screen surface (VK_QNX_screen_surface), writing
  // the new handle into *pSurface.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
                                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                  VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
                                                            reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
                                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                            reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created SurfaceKHR by value; the Optional
  // allocator converts to a null pointer when omitted.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateScreenSurfaceQNX(
      m_instance,
      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same call as createScreenSurfaceQNX, but wraps the surface in
  // a UniqueHandle whose deleter captures this instance, the allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateScreenSurfaceQNX(
      m_instance,
      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: asks whether the queue family can present to the given QNX
  // Screen window.
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
                                                                            struct _screen_window * window,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: reference form of the presentation-support query.
  template <typename Dispatch>
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
    PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
    return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  }
# endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17158. #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  17159. //=== VK_EXT_color_write_enable ===
  // C-pointer overload (VK_EXT_color_write_enable): per-attachment color-write toggles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
                                                                const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: the proxy supplies both the element count and the data pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17177. //=== VK_KHR_ray_tracing_maintenance1 ===
  // VK_KHR_ray_tracing_maintenance1: records an indirect trace-rays command whose
  // parameters live at the given device address.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  }
  17185. //=== VK_EXT_multi_draw ===
  // C-pointer overload (VK_EXT_multi_draw): records drawCount non-indexed draws; stride
  // is the byte distance between consecutive MultiDrawInfoEXT records.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount,
                                                      const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
                                                      uint32_t instanceCount,
                                                      uint32_t firstInstance,
                                                      uint32_t stride,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: draw count and data pointer come from the proxy; stride is
  // still caller-provided.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
                                                      uint32_t instanceCount,
                                                      uint32_t firstInstance,
                                                      uint32_t stride,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiEXT(
      m_commandBuffer, vertexInfo.size(), reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ), instanceCount, firstInstance, stride );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: records drawCount indexed draws; pVertexOffset, when non-null,
  // overrides the per-draw vertex offset (forwarded as-is to the C entry point).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
                                                             const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
                                                             uint32_t instanceCount,
                                                             uint32_t firstInstance,
                                                             uint32_t stride,
                                                             const int32_t * pVertexOffset,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiIndexedEXT(
      m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy overload: vertexOffset is Optional<const int32_t>, converted to a
  // possibly-null pointer for the C call.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
                                                             uint32_t instanceCount,
                                                             uint32_t firstInstance,
                                                             uint32_t stride,
                                                             Optional<const int32_t> vertexOffset,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
                                indexInfo.size(),
                                reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
                                instanceCount,
                                firstInstance,
                                stride,
                                static_cast<const int32_t *>( vertexOffset ) );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17242. //=== VK_EXT_pageable_device_local_memory ===
  // VK_EXT_pageable_device_local_memory: updates the priority of an existing memory
  // allocation.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
  }
  17249. //=== VK_KHR_maintenance4 ===
  // C-pointer overload (VK_KHR_maintenance4): queries memory requirements for a
  // prospective buffer without creating it.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns MemoryRequirements2 by value (no Result — the
  // underlying C function returns void).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-chosen
  // chain so extension structures chained behind it are populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceBufferMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
  17281. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload (VK_KHR_maintenance4): queries memory requirements for a
  // prospective image without creating it.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  // StructureChain overload: fills the MemoryRequirements2 link of a caller-chosen
  // chain so chained extension structures are populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetDeviceImageMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
  17313. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // C-pointer overload: standard Vulkan two-phase enumeration — call with a null
  // pSparseMemoryRequirements to get the count, then again to fill the array.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
                                                                      uint32_t * pSparseMemoryRequirementCount,
                                                                      VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
                                                   pSparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: performs the count/fill enumeration internally and returns
  // the results as a std::vector.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    // First call: count only (null data pointer).
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
  // Allocator-aware variant of the enhanced overload; the enable_if constrains B1 so
  // this only participates for allocators of SparseImageMemoryRequirements2.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
                                                 SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    // First call: count only (null data pointer).
    d.vkGetDeviceImageSparseMemoryRequirementsKHR(
      m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
                                                   reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
                                                   &sparseMemoryRequirementCount,
                                                   reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the implementation reported fewer elements on the second call.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
  17375. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17376. //=== VK_VALVE_descriptor_set_host_mapping ===
  // C-pointer overload (VK_VALVE_descriptor_set_host_mapping): queries host-mapping
  // info for a descriptor-set binding reference.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
                                                                             VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the host-mapping info by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
    Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
    d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
                                                    reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
                                                    reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
    return hostMapping;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17401. template <typename Dispatch>
  17402. VULKAN_HPP_INLINE void
  17403. Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17404. {
  17405. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17406. d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
  17407. }
  17408. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17409. template <typename Dispatch>
  17410. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  17411. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17412. {
  17413. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17414. void * pData;
  17415. d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
  17416. return pData;
  17417. }
  17418. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17419. //=== VK_EXT_shader_module_identifier ===
  17420. template <typename Dispatch>
  17421. VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
  17422. VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
  17423. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17424. {
  17425. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17426. d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
  17427. }
  17428. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17429. template <typename Dispatch>
  17430. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
  17431. Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17432. {
  17433. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17434. VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
  17435. d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
  17436. return identifier;
  17437. }
  17438. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17439. template <typename Dispatch>
  17440. VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
  17441. VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
  17442. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17443. {
  17444. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17445. d.vkGetShaderModuleCreateInfoIdentifierEXT(
  17446. m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
  17447. }
  17448. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17449. template <typename Dispatch>
  17450. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
  17451. Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
  17452. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17453. {
  17454. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17455. VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
  17456. d.vkGetShaderModuleCreateInfoIdentifierEXT(
  17457. m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
  17458. return identifier;
  17459. }
  17460. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17461. //=== VK_QCOM_tile_properties ===
  17462. template <typename Dispatch>
  17463. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  17464. uint32_t * pPropertiesCount,
  17465. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
  17466. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17467. {
  17468. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17469. return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
  17470. m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
  17471. }
  17472. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode enumeration of a framebuffer's tile properties.
  /// Implements the usual Vulkan two-call pattern: query the count, resize the
  /// vector, then fetch, looping while the driver answers VK_INCOMPLETE (the
  /// count can change between the two calls).
  /// @param framebuffer  framebuffer whose tile properties are enumerated
  /// @param d            dispatcher providing vkGetFramebufferTilePropertiesQCOM
  /// @return vector with one TilePropertiesQCOM per tile
  /// NOTE(review): the final VkResult is neither checked nor wrapped even though
  /// the declared return type is ResultValueType<...>::type — upstream Vulkan-Hpp
  /// routes it through resultCheck/createResultValueType here; confirm this is
  /// intended for this header version.
  template <typename TilePropertiesQCOMAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
    Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
    uint32_t propertiesCount;
    VkResult result;
    do
    {
      // First call with a null array: the driver writes the current count.
      result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertiesCount )
      {
        // Second call fills the now correctly-sized vector.
        properties.resize( propertiesCount );
        result = d.vkGetFramebufferTilePropertiesQCOM(
          m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry if the count grew between calls
    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
    if ( propertiesCount < properties.size() )
    {
      // Driver returned fewer entries than allocated; trim the excess.
      properties.resize( propertiesCount );
    }
    return properties;
  }
  /// Allocator-aware variant of the enhanced framebuffer tile-property
  /// enumeration: identical two-call VK_INCOMPLETE loop, but the result vector is
  /// constructed with the caller-supplied allocator. B1 is constrained (at the
  /// declaration site) to allocators whose value_type is TilePropertiesQCOM.
  /// NOTE(review): as in the sibling overload, the final VkResult is not checked
  /// or wrapped despite the ResultValueType return type — confirm against the
  /// generator's intent for this header version.
  template <typename TilePropertiesQCOMAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
    Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
                                              TilePropertiesQCOMAllocator &     tilePropertiesQCOMAllocator,
                                              Dispatch const &                  d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
    uint32_t propertiesCount;
    VkResult result;
    do
    {
      // First call with a null array: the driver writes the current count.
      result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertiesCount )
      {
        // Second call fills the now correctly-sized vector.
        properties.resize( propertiesCount );
        result = d.vkGetFramebufferTilePropertiesQCOM(
          m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );  // retry if the count grew between calls
    VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
    if ( propertiesCount < properties.size() )
    {
      // Driver returned fewer entries than allocated; trim the excess.
      properties.resize( propertiesCount );
    }
    return properties;
  }
  17528. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17529. template <typename Dispatch>
  17530. VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
  17531. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
  17532. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17533. {
  17534. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17535. return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
  17536. m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
  17537. }
  17538. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17539. template <typename Dispatch>
  17540. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
  17541. Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17542. {
  17543. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17544. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
  17545. d.vkGetDynamicRenderingTilePropertiesQCOM(
  17546. m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
  17547. return properties;
  17548. }
  17549. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17550. } // namespace VULKAN_HPP_NAMESPACE
  17551. #endif