vulkan_funcs.hpp 1.4 MB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
3022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049
  1. // Copyright 2015-2023 The Khronos Group Inc.
  2. //
  3. // SPDX-License-Identifier: Apache-2.0 OR MIT
  4. //
  5. // This header is generated from the Khronos Vulkan XML API Registry.
  6. #ifndef VULKAN_FUNCS_HPP
  7. #define VULKAN_FUNCS_HPP
  8. namespace VULKAN_HPP_NAMESPACE
  9. {
  10. //===========================
  11. //=== COMMAND Definitions ===
  12. //===========================
  13. //=== VK_VERSION_1_0 ===
  14. template <typename Dispatch>
  15. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo,
  16. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17. VULKAN_HPP_NAMESPACE::Instance * pInstance,
  18. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  19. {
  20. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  21. return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ),
  22. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  23. reinterpret_cast<VkInstance *>( pInstance ) ) );
  24. }
  25. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
// Enhanced-mode overload: takes a reference plus an Optional allocator instead of
// raw pointers, and returns the created Instance through ResultValueType instead
// of an output parameter.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance(
  const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Instance instance;
  // The Optional allocator converts to a (possibly null) AllocationCallbacks pointer.
  VkResult result =
    d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkInstance *>( &instance ) );
  // Report non-success results via resultCheck before packaging the value.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
}
  39. # ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
// Like the enhanced createInstance, but wraps the created handle in a
// UniqueHandle whose deleter (ObjectDestroy<NoParent, Dispatch>) destroys the
// instance with the same allocator when the handle goes out of scope.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique(
  const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Instance instance;
  VkResult result =
    d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                        reinterpret_cast<VkInstance *>( &instance ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
}
  54. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  55. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  56. template <typename Dispatch>
  57. VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  58. {
  59. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  60. d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  61. }
  62. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63. template <typename Dispatch>
  64. VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65. {
  66. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  67. d.vkDestroyInstance( m_instance,
  68. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  69. }
  70. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  71. template <typename Dispatch>
  72. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount,
  73. VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices,
  74. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  75. {
  76. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  77. return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
  78. }
  79. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PhysicalDeviceAllocator, typename Dispatch>
// Enhanced-mode overload: returns all physical devices in a vector, using the
// Vulkan two-call idiom (query count with a null array, then fill) and retrying
// while the implementation reports VK_INCOMPLETE, since the device set can
// change between the two calls.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
  Instance::enumeratePhysicalDevices( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
  uint32_t physicalDeviceCount;
  VkResult result;
  do
  {
    // First call only queries the current count.
    result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
    {
      physicalDevices.resize( physicalDeviceCount );
      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  // Shrink if the final call returned fewer devices than were allocated for.
  if ( physicalDeviceCount < physicalDevices.size() )
  {
    physicalDevices.resize( physicalDeviceCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
}
template <typename PhysicalDeviceAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
// Same as the allocator-less overload above, but seeds the returned vector with
// a caller-supplied allocator. The enable_if constrains this overload to
// allocators whose value_type is PhysicalDevice.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type
  Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
  uint32_t physicalDeviceCount;
  VkResult result;
  do
  {
    // Two-call idiom: query the count, then fill; retry on VK_INCOMPLETE.
    result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
    if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
    {
      physicalDevices.resize( physicalDeviceCount );
      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
  VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  if ( physicalDeviceCount < physicalDevices.size() )
  {
    physicalDevices.resize( physicalDeviceCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
}
  133. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  134. template <typename Dispatch>
  135. VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  136. {
  137. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  138. d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
  139. }
  140. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  141. template <typename Dispatch>
  142. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures
  143. PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  144. {
  145. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  146. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
  147. d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
  148. return features;
  149. }
  150. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  151. template <typename Dispatch>
  152. VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  153. VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties,
  154. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  155. {
  156. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  157. d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
  158. }
  159. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  160. template <typename Dispatch>
  161. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties
  162. PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  163. {
  164. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  165. VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
  166. d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
  167. return formatProperties;
  168. }
  169. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  170. template <typename Dispatch>
  171. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
  172. VULKAN_HPP_NAMESPACE::ImageType type,
  173. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  174. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  175. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
  176. VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties,
  177. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  178. {
  179. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  180. return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
  181. static_cast<VkFormat>( format ),
  182. static_cast<VkImageType>( type ),
  183. static_cast<VkImageTiling>( tiling ),
  184. static_cast<VkImageUsageFlags>( usage ),
  185. static_cast<VkImageCreateFlags>( flags ),
  186. reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
  187. }
  188. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
// Enhanced-mode overload: returns the ImageFormatProperties by value (via
// ResultValueType) and reports failure through resultCheck instead of a raw
// Result return.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type
  PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
                                            VULKAN_HPP_NAMESPACE::ImageType type,
                                            VULKAN_HPP_NAMESPACE::ImageTiling tiling,
                                            VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
                                            VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
                                            Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice,
                                                                static_cast<VkFormat>( format ),
                                                                static_cast<VkImageType>( type ),
                                                                static_cast<VkImageTiling>( tiling ),
                                                                static_cast<VkImageUsageFlags>( usage ),
                                                                static_cast<VkImageCreateFlags>( flags ),
                                                                reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
}
  210. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  211. template <typename Dispatch>
  212. VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties,
  213. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  214. {
  215. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  216. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
  217. }
  218. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  219. template <typename Dispatch>
  220. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties
  221. PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  222. {
  223. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  224. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
  225. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
  226. return properties;
  227. }
  228. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  229. template <typename Dispatch>
  230. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount,
  231. VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties,
  232. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  233. {
  234. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  235. d.vkGetPhysicalDeviceQueueFamilyProperties(
  236. m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  237. }
  238. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
// Enhanced-mode overload: returns the queue family properties as a vector.
// The underlying query cannot fail, so a single count-then-fill pair suffices
// (no VK_INCOMPLETE retry loop as in the Result-returning enumerations).
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
  PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  // First call queries the count only.
  d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  d.vkGetPhysicalDeviceQueueFamilyProperties(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  // Shrink if the second call wrote fewer entries than were allocated.
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    queueFamilyProperties.resize( queueFamilyPropertyCount );
  }
  return queueFamilyProperties;
}
template <typename QueueFamilyPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
// Same as the overload above, but seeds the returned vector with a
// caller-supplied allocator; the enable_if restricts this overload to
// allocators whose value_type is QueueFamilyProperties.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator>
  PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
  uint32_t queueFamilyPropertyCount;
  // Count-then-fill; this query has no failure result.
  d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  d.vkGetPhysicalDeviceQueueFamilyProperties(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    queueFamilyProperties.resize( queueFamilyPropertyCount );
  }
  return queueFamilyProperties;
}
  278. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  279. template <typename Dispatch>
  280. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties,
  281. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  282. {
  283. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  284. d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  285. }
  286. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  287. template <typename Dispatch>
  288. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties
  289. PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  290. {
  291. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  292. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
  293. d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
  294. return memoryProperties;
  295. }
  296. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  297. template <typename Dispatch>
  298. VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  299. {
  300. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  301. return d.vkGetInstanceProcAddr( m_instance, pName );
  302. }
  303. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  304. template <typename Dispatch>
  305. VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  306. {
  307. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  308. PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
  309. return result;
  310. }
  311. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  312. template <typename Dispatch>
  313. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  314. {
  315. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  316. return d.vkGetDeviceProcAddr( m_device, pName );
  317. }
  318. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  319. template <typename Dispatch>
  320. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  321. {
  322. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  323. PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
  324. return result;
  325. }
  326. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  327. template <typename Dispatch>
  328. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo,
  329. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  330. VULKAN_HPP_NAMESPACE::Device * pDevice,
  331. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  332. {
  333. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  334. return static_cast<Result>( d.vkCreateDevice( m_physicalDevice,
  335. reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ),
  336. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  337. reinterpret_cast<VkDevice *>( pDevice ) ) );
  338. }
  339. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
// Enhanced-mode overload: takes a reference plus an Optional allocator and
// returns the created Device via ResultValueType instead of an output pointer.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice(
  const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Device device;
  // The Optional allocator converts to a (possibly null) AllocationCallbacks pointer.
  VkResult result =
    d.vkCreateDevice( m_physicalDevice,
                      reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                      reinterpret_cast<VkDevice *>( &device ) );
  // Report non-success results via resultCheck before packaging the value.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
}
  354. # ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
// Like createDevice above, but wraps the created handle in a UniqueHandle whose
// deleter (ObjectDestroy<NoParent, Dispatch>) destroys the device with the same
// allocator when the handle goes out of scope.
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type
  PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Device device;
  VkResult result =
    d.vkCreateDevice( m_physicalDevice,
                      reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                      reinterpret_cast<VkDevice *>( &device ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
}
  372. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  373. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  374. template <typename Dispatch>
  375. VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  376. {
  377. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  378. d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  379. }
  380. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  381. template <typename Dispatch>
  382. VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  383. {
  384. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  385. d.vkDestroyDevice( m_device,
  386. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  387. }
  388. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  389. template <typename Dispatch>
  390. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName,
  391. uint32_t * pPropertyCount,
  392. VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
  393. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  394. {
  395. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  396. return static_cast<Result>(
  397. d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  398. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: runs the Vulkan two-call enumeration idiom and returns the
// results as a std::vector, retrying while the implementation reports VK_INCOMPLETE.
template <typename ExtensionPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
  uint32_t                                                                             propertyCount;
  VkResult                                                                             result;
  do
  {
    // First call: query only the count.
    result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      // Second call: fill the vector; VK_INCOMPLETE restarts the loop since the
      // count can change between the two calls.
      properties.resize( propertyCount );
      result = d.vkEnumerateInstanceExtensionProperties(
        layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Shrink in case the final call returned fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Same enumeration as above, but constructs the result vector with a caller-supplied allocator.
template <typename ExtensionPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  enumerateInstanceExtensionProperties( Optional<const std::string>    layerName,
                                        ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                        Dispatch const &               d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  uint32_t                                                                             propertyCount;
  VkResult                                                                             result;
  do
  {
    // Two-call idiom: count first, then fill; retry on VK_INCOMPLETE.
    result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateInstanceExtensionProperties(
        layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim any excess capacity from an over-sized final fill.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  458. template <typename Dispatch>
  459. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName,
  460. uint32_t * pPropertyCount,
  461. VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties,
  462. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  463. {
  464. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  465. return static_cast<Result>(
  466. d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
  467. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: two-call enumeration of device extensions into a std::vector,
// retrying while the implementation reports VK_INCOMPLETE.
template <typename ExtensionPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
  uint32_t                                                                             propertyCount;
  VkResult                                                                             result;
  do
  {
    // Count query, then fill; the count can change between the two calls.
    result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateDeviceExtensionProperties(
        m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim in case the final call returned fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Same enumeration as above, with a caller-supplied allocator for the result vector.
template <typename ExtensionPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type
  PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string>    layerName,
                                                      ExtensionPropertiesAllocator & extensionPropertiesAllocator,
                                                      Dispatch const &               d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  uint32_t                                                                             propertyCount;
  VkResult                                                                             result;
  do
  {
    // Two-call idiom: count first, then fill; retry on VK_INCOMPLETE.
    result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateDeviceExtensionProperties(
        m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim any excess from an over-sized final fill.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  527. template <typename Dispatch>
  528. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount,
  529. VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
  530. Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  531. {
  532. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  533. return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  534. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: enumerates all instance layers into a std::vector via the
// two-call idiom, retrying while VK_INCOMPLETE is reported.
template <typename LayerPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  enumerateInstanceLayerProperties( Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
  uint32_t                                                                     propertyCount;
  VkResult                                                                     result;
  do
  {
    // Count query, then fill; the layer count can change between the two calls.
    result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim in case the final call returned fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Same enumeration as above, with a caller-supplied allocator for the result vector.
template <typename LayerPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  uint32_t                                                                     propertyCount;
  VkResult                                                                     result;
  do
  {
    // Two-call idiom: count first, then fill; retry on VK_INCOMPLETE.
    result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim any excess from an over-sized final fill.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  590. template <typename Dispatch>
  591. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount,
  592. VULKAN_HPP_NAMESPACE::LayerProperties * pProperties,
  593. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  594. {
  595. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  596. return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
  597. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: two-call enumeration of device layers into a std::vector,
// retrying while VK_INCOMPLETE is reported.
template <typename LayerPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
  uint32_t                                                                     propertyCount;
  VkResult                                                                     result;
  do
  {
    // Count query, then fill; the count can change between the two calls.
    result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim in case the final call returned fewer elements than were allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Same enumeration as above, with a caller-supplied allocator for the result vector.
template <typename LayerPropertiesAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type
  PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  uint32_t                                                                     propertyCount;
  VkResult                                                                     result;
  do
  {
    // Two-call idiom: count first, then fill; retry on VK_INCOMPLETE.
    result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim any excess from an over-sized final fill.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  653. template <typename Dispatch>
  654. VULKAN_HPP_INLINE void
  655. Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  656. {
  657. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  658. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
  659. }
  660. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  661. template <typename Dispatch>
  662. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue
  663. Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  664. {
  665. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  666. VULKAN_HPP_NAMESPACE::Queue queue;
  667. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
  668. return queue;
  669. }
  670. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  671. template <typename Dispatch>
  672. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount,
  673. const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits,
  674. VULKAN_HPP_NAMESPACE::Fence fence,
  675. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  676. {
  677. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  678. return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  679. }
  680. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  681. template <typename Dispatch>
  682. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit(
  683. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  684. {
  685. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  686. VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
  687. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
  688. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  689. }
  690. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  691. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  692. template <typename Dispatch>
  693. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  694. {
  695. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  696. return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  697. }
  698. #else
  699. template <typename Dispatch>
  700. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  701. {
  702. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  703. VkResult result = d.vkQueueWaitIdle( m_queue );
  704. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
  705. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  706. }
  707. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  708. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  709. template <typename Dispatch>
  710. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  711. {
  712. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  713. return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  714. }
  715. #else
  716. template <typename Dispatch>
  717. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  718. {
  719. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  720. VkResult result = d.vkDeviceWaitIdle( m_device );
  721. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  722. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  723. }
  724. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  725. template <typename Dispatch>
  726. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo,
  727. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  728. VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory,
  729. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  730. {
  731. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  732. return static_cast<Result>( d.vkAllocateMemory( m_device,
  733. reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ),
  734. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  735. reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
  736. }
  737. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  738. template <typename Dispatch>
  739. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type
  740. Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
  741. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  742. Dispatch const & d ) const
  743. {
  744. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  745. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  746. VkResult result =
  747. d.vkAllocateMemory( m_device,
  748. reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
  749. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  750. reinterpret_cast<VkDeviceMemory *>( &memory ) );
  751. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
  752. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
  753. }
  754. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  755. template <typename Dispatch>
  756. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type
  757. Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo,
  758. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  759. Dispatch const & d ) const
  760. {
  761. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  762. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  763. VkResult result =
  764. d.vkAllocateMemory( m_device,
  765. reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ),
  766. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  767. reinterpret_cast<VkDeviceMemory *>( &memory ) );
  768. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
  769. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  770. UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
  771. }
  772. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  773. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  774. template <typename Dispatch>
  775. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  776. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  777. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  778. {
  779. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  780. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  781. }
  782. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  783. template <typename Dispatch>
  784. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  785. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  786. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  787. {
  788. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  789. d.vkFreeMemory( m_device,
  790. static_cast<VkDeviceMemory>( memory ),
  791. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  792. }
  793. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Alias of Device::freeMemory for the vkFreeMemory entry point.
// NOTE(review): the parentheses around ( Device::free ) appear intended to keep a
// function-like `free` macro (e.g. from <cstdlib> wrappers) from expanding here — confirm.
template <typename Dispatch>
VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory                memory,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode alias of Device::freeMemory; the parenthesized name guards against a
// function-like `free` macro (see the raw-pointer overload above this pair in the file).
template <typename Dispatch>
VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory                        memory,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // The Optional allocator is unwrapped (possibly to nullptr) before the reinterpret_cast.
  d.vkFreeMemory( m_device,
                  static_cast<VkDeviceMemory>( memory ),
                  reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  814. template <typename Dispatch>
  815. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  816. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  817. VULKAN_HPP_NAMESPACE::DeviceSize size,
  818. VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
  819. void ** ppData,
  820. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  821. {
  822. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  823. return static_cast<Result>( d.vkMapMemory( m_device,
  824. static_cast<VkDeviceMemory>( memory ),
  825. static_cast<VkDeviceSize>( offset ),
  826. static_cast<VkDeviceSize>( size ),
  827. static_cast<VkMemoryMapFlags>( flags ),
  828. ppData ) );
  829. }
  830. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  831. template <typename Dispatch>
  832. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  833. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  834. VULKAN_HPP_NAMESPACE::DeviceSize size,
  835. VULKAN_HPP_NAMESPACE::MemoryMapFlags flags,
  836. Dispatch const & d ) const
  837. {
  838. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  839. void * pData;
  840. VkResult result = d.vkMapMemory( m_device,
  841. static_cast<VkDeviceMemory>( memory ),
  842. static_cast<VkDeviceSize>( offset ),
  843. static_cast<VkDeviceSize>( size ),
  844. static_cast<VkMemoryMapFlags>( flags ),
  845. &pData );
  846. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
  847. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
  848. }
  849. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  850. template <typename Dispatch>
  851. VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  852. {
  853. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  854. d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  855. }
  856. template <typename Dispatch>
  857. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount,
  858. const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
  859. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  860. {
  861. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  862. return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  863. }
  864. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  865. template <typename Dispatch>
  866. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  867. Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
  868. Dispatch const & d ) const
  869. {
  870. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  871. VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  872. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  873. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  874. }
  875. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  876. template <typename Dispatch>
  877. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount,
  878. const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges,
  879. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  880. {
  881. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  882. return static_cast<Result>(
  883. d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  884. }
  885. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  886. template <typename Dispatch>
  887. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  888. Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges,
  889. Dispatch const & d ) const
  890. {
  891. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  892. VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
  893. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
  894. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  895. }
  896. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  897. template <typename Dispatch>
  898. VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  899. VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes,
  900. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  901. {
  902. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  903. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
  904. }
  905. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  906. template <typename Dispatch>
  907. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  908. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  909. {
  910. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  911. VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
  912. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
  913. return committedMemoryInBytes;
  914. }
  915. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Plain wrapper around vkBindBufferMemory: binds `memory` at `memoryOffset` to `buffer`
// and returns the raw Result to the caller.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceMemory memory,
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
// Enhanced variant: routes the VkResult through resultCheck (library-configured error
// reporting) and returns a void-typed ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory(
VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result =
d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Plain wrapper around vkBindImageMemory: binds `memory` at `memoryOffset` to `image`
// and returns the raw Result to the caller.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::DeviceMemory memory,
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
}
#else
// Enhanced variant: routes the VkResult through resultCheck and returns a void-typed
// ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory(
VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result =
d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  962. template <typename Dispatch>
  963. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer,
  964. VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
  965. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  966. {
  967. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  968. d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
  969. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the MemoryRequirements for `buffer` by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
return memoryRequirements;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkGetImageMemoryRequirements: fills *pMemoryRequirements for `image`.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the MemoryRequirements for `image` by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements
Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
return memoryRequirements;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkGetImageSparseMemoryRequirements. Follows the Vulkan two-call
// idiom: callers pass nullptr for pSparseMemoryRequirements to query the count.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
uint32_t * pSparseMemoryRequirementCount,
VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetImageSparseMemoryRequirements( m_device,
static_cast<VkImage>( image ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: performs the two-call enumeration (first call queries the count,
// second call fills the vector) and returns the requirements as a std::vector.
template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
uint32_t sparseMemoryRequirementCount;
// First call: query only the number of entries.
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
// Second call: fill the sized vector.
d.vkGetImageSparseMemoryRequirements( m_device,
static_cast<VkImage>( image ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
// Shrink in case the implementation wrote fewer entries than first reported.
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
// Same as above, but constructs the result vector with a caller-supplied allocator.
template <typename SparseImageMemoryRequirementsAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator>
Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image,
SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements(
sparseImageMemoryRequirementsAllocator );
uint32_t sparseMemoryRequirementCount;
d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
d.vkGetImageSparseMemoryRequirements( m_device,
static_cast<VkImage>( image ),
&sparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
{
sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
}
return sparseMemoryRequirements;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkGetPhysicalDeviceSparseImageFormatProperties. Two-call idiom:
// pass nullptr for pProperties to query the count into *pPropertyCount.
template <typename Dispatch>
VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
uint32_t * pPropertyCount,
VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
pPropertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: performs the two-call enumeration (count query, then fill) and
// returns the sparse-image format properties as a std::vector.
template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
uint32_t propertyCount;
// First call: query only the number of entries.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
// Second call: fill the sized vector.
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
// Shrink in case the implementation wrote fewer entries than first reported.
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
// Same as above, but constructs the result vector with a caller-supplied allocator.
template <typename SparseImageFormatPropertiesAllocator,
typename Dispatch,
typename B1,
typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator>
PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format,
VULKAN_HPP_NAMESPACE::ImageType type,
VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
VULKAN_HPP_NAMESPACE::ImageTiling tiling,
SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
uint32_t propertyCount;
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
nullptr );
properties.resize( propertyCount );
d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice,
static_cast<VkFormat>( format ),
static_cast<VkImageType>( type ),
static_cast<VkSampleCountFlagBits>( samples ),
static_cast<VkImageUsageFlags>( usage ),
static_cast<VkImageTiling>( tiling ),
&propertyCount,
reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
if ( propertyCount < properties.size() )
{
properties.resize( propertyCount );
}
return properties;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkQueueBindSparse: submits `bindInfoCount` sparse-binding operations
// on this queue, optionally signaling `fence`; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount,
const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo,
VULKAN_HPP_NAMESPACE::Fence fence,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the bind infos as an ArrayProxy, checks the VkResult via
// resultCheck, and returns a void-typed ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse(
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result =
d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkCreateFence: writes the new fence to *pFence and returns the raw
// Result. `pAllocator` may be nullptr for default allocation.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Fence * pFence,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateFence( m_device,
reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: creates a Fence, checks the VkResult via resultCheck, and returns
// the handle wrapped in a ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence(
const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Fence fence;
VkResult result =
d.vkCreateFence( m_device,
reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFence *>( &fence ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As createFence, but returns a UniqueHandle that destroys the fence (with the same
// allocator and dispatcher) when it goes out of scope.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique(
const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Fence fence;
VkResult result =
d.vkCreateFence( m_device,
reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkFence *>( &fence ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  1225. template <typename Dispatch>
  1226. VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
  1227. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  1228. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1229. {
  1230. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1231. d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  1232. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: destroys `fence`, taking the allocation callbacks as an Optional.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyFence( m_device,
static_cast<VkFence>( fence ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy overload for Fence; identical behavior to destroyFence.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy overload for Fence with Optional allocation callbacks.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroyFence( m_device,
static_cast<VkFence>( fence ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  1265. template <typename Dispatch>
  1266. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount,
  1267. const VULKAN_HPP_NAMESPACE::Fence * pFences,
  1268. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1269. {
  1270. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1271. return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  1272. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: resets the fences in `fences`, checks the VkResult via resultCheck,
// and returns a void-typed ResultValueType.
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type
Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Plain wrapper around vkGetFenceStatus: returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
}
#else
// Enhanced variant: both eSuccess and eNotReady are passed to resultCheck as accepted
// status codes, so the caller receives the Result instead of an error report.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Thin wrapper over vkWaitForFences: waits (all or any, per `waitAll`) up to `timeout`
// nanoseconds and returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount,
const VULKAN_HPP_NAMESPACE::Fence * pFences,
VULKAN_HPP_NAMESPACE::Bool32 waitAll,
uint64_t timeout,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>(
d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: both eSuccess and eTimeout are passed to resultCheck as accepted
// status codes, so a timeout is returned to the caller rather than reported as an error.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences,
VULKAN_HPP_NAMESPACE::Bool32 waitAll,
uint64_t timeout,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VkResult result =
d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences",
{ VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkCreateSemaphore: writes the new semaphore to *pSemaphore and
// returns the raw Result. `pAllocator` may be nullptr for default allocation.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateSemaphore( m_device,
reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: creates a Semaphore, checks the VkResult via resultCheck, and
// returns the handle wrapped in a ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type
Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
VkResult result =
d.vkCreateSemaphore( m_device,
reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSemaphore *>( &semaphore ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As createSemaphore, but returns a UniqueHandle that destroys the semaphore (with the
// same allocator and dispatcher) when it goes out of scope.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type
Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Semaphore semaphore;
VkResult result =
d.vkCreateSemaphore( m_device,
reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkSemaphore *>( &semaphore ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
return createResultValueType(
static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  1381. template <typename Dispatch>
  1382. VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  1383. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  1384. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1385. {
  1386. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1387. d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  1388. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: destroys `semaphore`, taking the allocation callbacks as an Optional.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroySemaphore( m_device,
static_cast<VkSemaphore>( semaphore ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy overload for Semaphore; identical behavior to destroySemaphore.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy overload for Semaphore with Optional allocation callbacks.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkDestroySemaphore( m_device,
static_cast<VkSemaphore>( semaphore ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Thin wrapper over vkCreateEvent: writes the new event to *pEvent and returns the raw
// Result. `pAllocator` may be nullptr for default allocation.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo,
const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
VULKAN_HPP_NAMESPACE::Event * pEvent,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<Result>( d.vkCreateEvent( m_device,
reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
reinterpret_cast<VkEvent *>( pEvent ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: creates an Event, checks the VkResult via resultCheck, and returns
// the handle wrapped in a ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent(
const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Event event;
VkResult result =
d.vkCreateEvent( m_device,
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkEvent *>( &event ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// As createEvent, but returns a UniqueHandle that destroys the event (with the same
// allocator and dispatcher) when it goes out of scope.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique(
const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
VULKAN_HPP_NAMESPACE::Event event;
VkResult result =
d.vkCreateEvent( m_device,
reinterpret_cast<const VkEventCreateInfo *>( &createInfo ),
reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
reinterpret_cast<VkEvent *>( &event ) );
resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style overload: forwards directly to vkDestroyEvent; pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1474. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: Optional allocator converts to a possibly-null
// VkAllocationCallbacks pointer before the vkDestroyEvent call.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyEvent( m_device,
                    static_cast<VkEvent>( event ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1485. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy(Event) overload; identical to destroyEvent, provided so
// UniqueHandle/ObjectDestroy can destroy any handle type uniformly.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1494. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy(Event) overload with an Optional allocator (enhanced mode).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyEvent( m_device,
                    static_cast<VkEvent>( event ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1505. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  1506. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Non-enhanced getEventStatus: returns the raw Result (eEventSet / eEventReset
// on success) without any result checking.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
}
  1513. #else
// Enhanced getEventStatus: eEventSet and eEventReset are both treated as
// success codes by resultCheck; any other result is an error.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus",
               { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
  1524. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  1525. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Non-enhanced setEvent: returns the raw Result from vkSetEvent unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
  1532. #else
// Enhanced setEvent: checks the result (throws / returns error per build
// configuration) and yields a void-typed ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event,
                                                                                                                 Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  1542. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  1543. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Non-enhanced resetEvent: returns the raw Result from vkResetEvent unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
}
  1550. #else
// Enhanced resetEvent: result-checked variant returning ResultValueType<void>.
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
  1559. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// C-style createQueryPool: thin reinterpret_cast shim over vkCreateQueryPool;
// caller checks the returned Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo,
                                                                       const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                       VULKAN_HPP_NAMESPACE::QueryPool *                 pQueryPool,
                                                                       Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateQueryPool( m_device,
                                                   reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ),
                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                   reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
}
  1572. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced createQueryPool: returns the created QueryPool via ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type
  Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &         createInfo,
                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                           Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::QueryPool queryPool;
  VkResult result =
    d.vkCreateQueryPool( m_device,
                         reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkQueryPool *>( &queryPool ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
}
  1589. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// createQueryPoolUnique: same as createQueryPool, but wraps the pool in a
// UniqueHandle that destroys it with the captured allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type
  Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo &         createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::QueryPool queryPool;
  VkResult result =
    d.vkCreateQueryPool( m_device,
                         reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkQueryPool *>( &queryPool ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  1608. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  1609. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style overload: forwards directly to vkDestroyQueryPool; pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                 Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1618. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool                            queryPool,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>  allocator,
                                                 Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyQueryPool( m_device,
                        static_cast<VkQueryPool>( queryPool ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1629. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy(QueryPool) overload; identical to destroyQueryPool.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool                   queryPool,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1638. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy(QueryPool) overload with an Optional allocator (enhanced mode).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool                           queryPool,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyQueryPool( m_device,
                        static_cast<VkQueryPool>( queryPool ),
                        reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1649. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style getQueryPoolResults: copies up to queryCount results starting at
// firstQuery into pData (dataSize bytes, stride bytes apart); the caller
// inspects the Result (eNotReady is possible without VK_QUERY_RESULT_WAIT_BIT).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                                           uint32_t                               firstQuery,
                                                                           uint32_t                               queryCount,
                                                                           size_t                                 dataSize,
                                                                           void *                                 pData,
                                                                           VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                                           VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                           Dispatch const &                       d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetQueryPoolResults( m_device,
                                                       static_cast<VkQueryPool>( queryPool ),
                                                       firstQuery,
                                                       queryCount,
                                                       dataSize,
                                                       pData,
                                                       static_cast<VkDeviceSize>( stride ),
                                                       static_cast<VkQueryResultFlags>( flags ) ) );
}
  1670. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced getQueryPoolResults: allocates a vector of dataSize / sizeof(DataType)
// elements and fills it from the query pool. eSuccess and eNotReady are both
// accepted success codes, so the caller must check the returned Result to know
// whether the data is complete.
template <typename DataType, typename DataTypeAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>>
  Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                               uint32_t                               firstQuery,
                               uint32_t                               queryCount,
                               size_t                                 dataSize,
                               VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                               VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                               Dispatch const &                       d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // dataSize must be an exact multiple of the element type.
  VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  VkResult result = d.vkGetQueryPoolResults( m_device,
                                             static_cast<VkQueryPool>( queryPool ),
                                             firstQuery,
                                             queryCount,
                                             data.size() * sizeof( DataType ),
                                             reinterpret_cast<void *>( data.data() ),
                                             static_cast<VkDeviceSize>( stride ),
                                             static_cast<VkQueryResultFlags>( flags ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
// Single-value convenience variant: fetches results into one DataType
// (dataSize == sizeof(DataType)). eSuccess and eNotReady are both accepted;
// check the Result before trusting the data.
template <typename DataType, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool        queryPool,
                                                                                         uint32_t                               firstQuery,
                                                                                         uint32_t                               queryCount,
                                                                                         VULKAN_HPP_NAMESPACE::DeviceSize       stride,
                                                                                         VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
                                                                                         Dispatch const &                       d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  DataType data;
  VkResult result = d.vkGetQueryPoolResults( m_device,
                                             static_cast<VkQueryPool>( queryPool ),
                                             firstQuery,
                                             queryCount,
                                             sizeof( DataType ),
                                             reinterpret_cast<void *>( &data ),
                                             static_cast<VkDeviceSize>( stride ),
                                             static_cast<VkQueryResultFlags>( flags ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
  1720. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style createBuffer: thin reinterpret_cast shim over vkCreateBuffer;
// caller checks the returned Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo *    pCreateInfo,
                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                    VULKAN_HPP_NAMESPACE::Buffer *                    pBuffer,
                                                                    Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateBuffer( m_device,
                                                reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ),
                                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                reinterpret_cast<VkBuffer *>( pBuffer ) ) );
}
  1733. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced createBuffer: returns the created Buffer via ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer(
  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Buffer buffer;
  VkResult result =
    d.vkCreateBuffer( m_device,
                      reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                      reinterpret_cast<VkBuffer *>( &buffer ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
}
  1748. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// createBufferUnique: RAII variant — the returned UniqueHandle destroys the
// buffer with the same allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique(
  const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Buffer buffer;
  VkResult result =
    d.vkCreateBuffer( m_device,
                      reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ),
                      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                      reinterpret_cast<VkBuffer *>( &buffer ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  1764. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  1765. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style overload: forwards directly to vkDestroyBuffer; pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                              Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1774. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                              Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBuffer( m_device,
                     static_cast<VkBuffer>( buffer ),
                     reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1785. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy(Buffer) overload; identical to destroyBuffer.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                      buffer,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1794. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy(Buffer) overload with an Optional allocator (enhanced mode).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer                              buffer,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBuffer( m_device,
                     static_cast<VkBuffer>( buffer ),
                     reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1805. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style createBufferView: thin reinterpret_cast shim over vkCreateBufferView.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo,
                                                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks *  pAllocator,
                                                                        VULKAN_HPP_NAMESPACE::BufferView *                 pView,
                                                                        Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateBufferView( m_device,
                                                    reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ),
                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                    reinterpret_cast<VkBufferView *>( pView ) ) );
}
  1818. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced createBufferView: returns the created BufferView via ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type
  Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                            Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::BufferView view;
  VkResult result =
    d.vkCreateBufferView( m_device,
                          reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkBufferView *>( &view ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
}
  1835. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// createBufferViewUnique: RAII variant — returned UniqueHandle destroys the
// view with the captured allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type
  Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo &        createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const &                                          d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::BufferView view;
  VkResult result =
    d.vkCreateBufferView( m_device,
                          reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkBufferView *>( &view ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  1853. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  1854. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style overload: forwards directly to vkDestroyBufferView; pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1863. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBufferView( m_device,
                         static_cast<VkBufferView>( bufferView ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1874. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy(BufferView) overload; identical to destroyBufferView.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                  bufferView,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1883. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy(BufferView) overload with an Optional allocator (enhanced mode).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView                          bufferView,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyBufferView( m_device,
                         static_cast<VkBufferView>( bufferView ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1894. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style createImage: thin reinterpret_cast shim over vkCreateImage.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo *     pCreateInfo,
                                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                   VULKAN_HPP_NAMESPACE::Image *                     pImage,
                                                                   Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateImage( m_device,
                                               reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ),
                                               reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                               reinterpret_cast<VkImage *>( pImage ) ) );
}
  1907. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced createImage: returns the created Image via ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage(
  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Image image;
  VkResult result =
    d.vkCreateImage( m_device,
                     reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                     reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                     reinterpret_cast<VkImage *>( &image ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
}
  1922. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// createImageUnique: RAII variant — the returned UniqueHandle destroys the
// image with the same allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique(
  const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::Image image;
  VkResult result =
    d.vkCreateImage( m_device,
                     reinterpret_cast<const VkImageCreateInfo *>( &createInfo ),
                     reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                     reinterpret_cast<VkImage *>( &image ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
  1938. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  1939. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style overload: forwards directly to vkDestroyImage; pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                       image,
                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                             Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1948. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image                               image,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyImage( m_device,
                    static_cast<VkImage>( image ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1959. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy(Image) overload; identical to destroyImage.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                       image,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  1968. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy(Image) overload with an Optional allocator (enhanced mode).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image                               image,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyImage( m_device,
                    static_cast<VkImage>( image ),
                    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  1979. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-style getImageSubresourceLayout: writes the layout into *pLayout; the
// underlying vkGetImageSubresourceLayout has no result code.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image                    image,
                                                          const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource,
                                                          VULKAN_HPP_NAMESPACE::SubresourceLayout *      pLayout,
                                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetImageSubresourceLayout( m_device,
                                 static_cast<VkImage>( image ),
                                 reinterpret_cast<const VkImageSubresource *>( pSubresource ),
                                 reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
}
  1992. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  1993. template <typename Dispatch>
  1994. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout(
  1995. VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  1996. {
  1997. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  1998. VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
  1999. d.vkGetImageSubresourceLayout( m_device,
  2000. static_cast<VkImage>( image ),
  2001. reinterpret_cast<const VkImageSubresource *>( &subresource ),
  2002. reinterpret_cast<VkSubresourceLayout *>( &layout ) );
  2003. return layout;
  2004. }
  2005. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2006. template <typename Dispatch>
  2007. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo,
  2008. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2009. VULKAN_HPP_NAMESPACE::ImageView * pView,
  2010. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2011. {
  2012. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2013. return static_cast<Result>( d.vkCreateImageView( m_device,
  2014. reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ),
  2015. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2016. reinterpret_cast<VkImageView *>( pView ) ) );
  2017. }
  2018. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2019. template <typename Dispatch>
  2020. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type
  2021. Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
  2022. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2023. Dispatch const & d ) const
  2024. {
  2025. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2026. VULKAN_HPP_NAMESPACE::ImageView view;
  2027. VkResult result =
  2028. d.vkCreateImageView( m_device,
  2029. reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
  2030. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2031. reinterpret_cast<VkImageView *>( &view ) );
  2032. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
  2033. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
  2034. }
  2035. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2036. template <typename Dispatch>
  2037. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type
  2038. Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo,
  2039. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2040. Dispatch const & d ) const
  2041. {
  2042. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2043. VULKAN_HPP_NAMESPACE::ImageView view;
  2044. VkResult result =
  2045. d.vkCreateImageView( m_device,
  2046. reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ),
  2047. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2048. reinterpret_cast<VkImageView *>( &view ) );
  2049. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
  2050. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2051. UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2052. }
  2053. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2054. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2055. template <typename Dispatch>
  2056. VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
  2057. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2058. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2059. {
  2060. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2061. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2062. }
  2063. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2064. template <typename Dispatch>
  2065. VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView,
  2066. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2067. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2068. {
  2069. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2070. d.vkDestroyImageView( m_device,
  2071. static_cast<VkImageView>( imageView ),
  2072. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2073. }
  2074. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2075. template <typename Dispatch>
  2076. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
  2077. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2078. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2079. {
  2080. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2081. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2082. }
  2083. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2084. template <typename Dispatch>
  2085. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView,
  2086. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2087. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2088. {
  2089. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2090. d.vkDestroyImageView( m_device,
  2091. static_cast<VkImageView>( imageView ),
  2092. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2093. }
  2094. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2095. template <typename Dispatch>
  2096. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
  2097. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2098. VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule,
  2099. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2100. {
  2101. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2102. return static_cast<Result>( d.vkCreateShaderModule( m_device,
  2103. reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ),
  2104. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2105. reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
  2106. }
  2107. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2108. template <typename Dispatch>
  2109. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type
  2110. Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
  2111. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2112. Dispatch const & d ) const
  2113. {
  2114. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2115. VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
  2116. VkResult result =
  2117. d.vkCreateShaderModule( m_device,
  2118. reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
  2119. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2120. reinterpret_cast<VkShaderModule *>( &shaderModule ) );
  2121. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
  2122. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
  2123. }
  2124. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2125. template <typename Dispatch>
  2126. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type
  2127. Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
  2128. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2129. Dispatch const & d ) const
  2130. {
  2131. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2132. VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
  2133. VkResult result =
  2134. d.vkCreateShaderModule( m_device,
  2135. reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ),
  2136. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2137. reinterpret_cast<VkShaderModule *>( &shaderModule ) );
  2138. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
  2139. return createResultValueType(
  2140. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2141. UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2142. }
  2143. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2144. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2145. template <typename Dispatch>
  2146. VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
  2147. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2148. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2149. {
  2150. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2151. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2152. }
  2153. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2154. template <typename Dispatch>
  2155. VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
  2156. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2157. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2158. {
  2159. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2160. d.vkDestroyShaderModule( m_device,
  2161. static_cast<VkShaderModule>( shaderModule ),
  2162. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2163. }
  2164. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2165. template <typename Dispatch>
  2166. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
  2167. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2168. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2169. {
  2170. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2171. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2172. }
  2173. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2174. template <typename Dispatch>
  2175. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule,
  2176. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2177. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2178. {
  2179. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2180. d.vkDestroyShaderModule( m_device,
  2181. static_cast<VkShaderModule>( shaderModule ),
  2182. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2183. }
  2184. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2185. template <typename Dispatch>
  2186. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo,
  2187. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2188. VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache,
  2189. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2190. {
  2191. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2192. return static_cast<Result>( d.vkCreatePipelineCache( m_device,
  2193. reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ),
  2194. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2195. reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
  2196. }
  2197. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2198. template <typename Dispatch>
  2199. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type
  2200. Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
  2201. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2202. Dispatch const & d ) const
  2203. {
  2204. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2205. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
  2206. VkResult result =
  2207. d.vkCreatePipelineCache( m_device,
  2208. reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
  2209. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2210. reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
  2211. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
  2212. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
  2213. }
  2214. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  2215. template <typename Dispatch>
  2216. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type
  2217. Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo,
  2218. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2219. Dispatch const & d ) const
  2220. {
  2221. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2222. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
  2223. VkResult result =
  2224. d.vkCreatePipelineCache( m_device,
  2225. reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ),
  2226. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2227. reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
  2228. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
  2229. return createResultValueType(
  2230. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2231. UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  2232. }
  2233. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2234. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2235. template <typename Dispatch>
  2236. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2237. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2238. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2239. {
  2240. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2241. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2242. }
  2243. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2244. template <typename Dispatch>
  2245. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2246. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2247. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2248. {
  2249. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2250. d.vkDestroyPipelineCache( m_device,
  2251. static_cast<VkPipelineCache>( pipelineCache ),
  2252. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2253. }
  2254. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2255. template <typename Dispatch>
  2256. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2257. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2258. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2259. {
  2260. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2261. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2262. }
  2263. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2264. template <typename Dispatch>
  2265. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2266. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2267. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2268. {
  2269. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2270. d.vkDestroyPipelineCache( m_device,
  2271. static_cast<VkPipelineCache>( pipelineCache ),
  2272. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2273. }
  2274. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2275. template <typename Dispatch>
  2276. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2277. size_t * pDataSize,
  2278. void * pData,
  2279. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2280. {
  2281. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2282. return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  2283. }
  2284. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Fetches the complete pipeline-cache blob as a byte vector using the standard
// Vulkan two-call enumeration: first query the required size with a null data
// pointer, then fetch into a buffer of that size.  The loop retries while the
// driver reports VK_INCOMPLETE (i.e. the size changed between the two calls).
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> data;
  size_t                                 dataSize;
  VkResult                               result;
  do
  {
    // Size query: with a null data pointer the driver writes the byte count into dataSize.
    result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
    if ( ( result == VK_SUCCESS ) && dataSize )
    {
      data.resize( dataSize );
      // Fetch: copies the blob; dataSize is updated to the amount actually written.
      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
  VULKAN_HPP_ASSERT( dataSize <= data.size() );
  if ( dataSize < data.size() )
  {
    // Trim over-allocation if the final fetch wrote less than requested.
    data.resize( dataSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
// Same two-call pipeline-cache enumeration as the variant above, but the
// result vector is constructed with a caller-supplied uint8_t allocator.
// The enable_if constraint restricts B1 to allocators of uint8_t.
template <typename Uint8_tAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
  size_t                                 dataSize;
  VkResult                               result;
  do
  {
    // Size query: null data pointer makes the driver report the byte count.
    result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
    if ( ( result == VK_SUCCESS ) && dataSize )
    {
      data.resize( dataSize );
      // Fetch: retried via the loop if the driver returns VK_INCOMPLETE.
      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
  VULKAN_HPP_ASSERT( dataSize <= data.size() );
  if ( dataSize < data.size() )
  {
    // Trim over-allocation if the final fetch wrote less than requested.
    data.resize( dataSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
  2338. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2339. template <typename Dispatch>
  2340. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
  2341. uint32_t srcCacheCount,
  2342. const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches,
  2343. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2344. {
  2345. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2346. return static_cast<Result>(
  2347. d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  2348. }
  2349. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2350. template <typename Dispatch>
  2351. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  2352. Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache,
  2353. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches,
  2354. Dispatch const & d ) const
  2355. {
  2356. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2357. VkResult result = d.vkMergePipelineCaches(
  2358. m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
  2359. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
  2360. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  2361. }
  2362. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2363. template <typename Dispatch>
  2364. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2365. uint32_t createInfoCount,
  2366. const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos,
  2367. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2368. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  2369. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2370. {
  2371. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2372. return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device,
  2373. static_cast<VkPipelineCache>( pipelineCache ),
  2374. createInfoCount,
  2375. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ),
  2376. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2377. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  2378. }
  2379. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2380. template <typename PipelineAllocator, typename Dispatch>
  2381. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  2382. Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2383. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
  2384. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2385. Dispatch const & d ) const
  2386. {
  2387. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2388. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  2389. VkResult result = d.vkCreateGraphicsPipelines(
  2390. m_device,
  2391. static_cast<VkPipelineCache>( pipelineCache ),
  2392. createInfos.size(),
  2393. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
  2394. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2395. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  2396. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2397. VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
  2398. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  2399. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  2400. }
// Batch graphics-pipeline creation, identical to the variant above except the
// result vector is built with a caller-supplied PipelineAllocator instance.
// The enable_if constraint restricts B0 to allocators of Pipeline.
template <typename PipelineAllocator,
          typename Dispatch,
          typename B0,
          typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                   VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   PipelineAllocator & pipelineAllocator,
                                   Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // One output slot per create-info; the caller's allocator owns the storage.
  std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  VkResult result = d.vkCreateGraphicsPipelines(
    m_device,
    static_cast<VkPipelineCache>( pipelineCache ),
    createInfos.size(),
    reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  // ePipelineCompileRequiredEXT is treated as success, hence the ResultValue return.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
}
  2426. template <typename Dispatch>
  2427. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
  2428. Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2429. const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
  2430. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2431. Dispatch const & d ) const
  2432. {
  2433. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2434. VULKAN_HPP_NAMESPACE::Pipeline pipeline;
  2435. VkResult result = d.vkCreateGraphicsPipelines(
  2436. m_device,
  2437. static_cast<VkPipelineCache>( pipelineCache ),
  2438. 1,
  2439. reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
  2440. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2441. reinterpret_cast<VkPipeline *>( &pipeline ) );
  2442. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2443. VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline",
  2444. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  2445. return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  2446. }
  2447. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Batch pipeline creation returning owning UniqueHandles: raw handles are
// created first, the result is validated, and only then is ownership wrapped,
// so handles are never wrapped when resultCheck throws.
template <typename Dispatch, typename PipelineAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Temporary vector of raw (non-owning) handles filled by the C call.
  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  VkResult result = d.vkCreateGraphicsPipelines(
    m_device,
    static_cast<VkPipelineCache>( pipelineCache ),
    createInfos.size(),
    reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  // ePipelineCompileRequiredEXT is treated as success, hence the ResultValue return.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  // Transfer ownership: each raw handle gets a deleter bound to this device,
  // the creation allocator and the dispatcher.
  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
  uniquePipelines.reserve( createInfos.size() );
  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  for ( auto const & pipeline : pipelines )
  {
    uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  }
  return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
// Same UniqueHandle batch creation as above, but the result vector is built
// with a caller-supplied allocator.  The enable_if constraint restricts B0 to
// allocators of UniqueHandle<Pipeline, Dispatch>.
template <typename Dispatch,
          typename PipelineAllocator,
          typename B0,
          typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         PipelineAllocator & pipelineAllocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Temporary vector of raw (non-owning) handles filled by the C call.
  std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  VkResult result = d.vkCreateGraphicsPipelines(
    m_device,
    static_cast<VkPipelineCache>( pipelineCache ),
    createInfos.size(),
    reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  // ePipelineCompileRequiredEXT is treated as success, hence the ResultValue return.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  // Wrap each raw handle in an owning UniqueHandle after the result is validated.
  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
  uniquePipelines.reserve( createInfos.size() );
  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  for ( auto const & pipeline : pipelines )
  {
    uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  }
  return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
}
  // Enhanced-mode convenience wrapper creating exactly one graphics pipeline (count == 1) and
  // wrapping it in a UniqueHandle whose ObjectDestroy deleter destroys it through this device
  // with the same allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateGraphicsPipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a success code, so callers must inspect the Result.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2533. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2534. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2535. template <typename Dispatch>
  2536. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2537. uint32_t createInfoCount,
  2538. const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos,
  2539. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2540. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  2541. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2542. {
  2543. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2544. return static_cast<Result>( d.vkCreateComputePipelines( m_device,
  2545. static_cast<VkPipelineCache>( pipelineCache ),
  2546. createInfoCount,
  2547. reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ),
  2548. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2549. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  2550. }
  2551. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2552. template <typename PipelineAllocator, typename Dispatch>
  2553. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  2554. Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2555. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
  2556. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2557. Dispatch const & d ) const
  2558. {
  2559. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2560. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  2561. VkResult result = d.vkCreateComputePipelines(
  2562. m_device,
  2563. static_cast<VkPipelineCache>( pipelineCache ),
  2564. createInfos.size(),
  2565. reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
  2566. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2567. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  2568. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2569. VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
  2570. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  2571. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  2572. }
  2573. template <typename PipelineAllocator,
  2574. typename Dispatch,
  2575. typename B0,
  2576. typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  2577. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  2578. Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  2579. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
  2580. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2581. PipelineAllocator & pipelineAllocator,
  2582. Dispatch const & d ) const
  2583. {
  2584. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2585. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  2586. VkResult result = d.vkCreateComputePipelines(
  2587. m_device,
  2588. static_cast<VkPipelineCache>( pipelineCache ),
  2589. createInfos.size(),
  2590. reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
  2591. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  2592. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  2593. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  2594. VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines",
  2595. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  2596. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  2597. }
  // Enhanced-mode convenience wrapper creating exactly one compute pipeline (count == 1).
  // Returns the Result (ePipelineCompileRequiredEXT is accepted as a success code) together with
  // the raw Pipeline handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                   const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
  2619. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode batch creation of compute pipelines returning owning UniqueHandles.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Raw handles are created first into a default-allocated scratch vector.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a success code, so callers must inspect the Result.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // Wrap each raw handle in a UniqueHandle; all share one ObjectDestroy deleter built from this
    // device and the same allocation callbacks that were used for creation.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Enhanced-mode batch creation of compute pipelines returning owning UniqueHandles, with a
  // caller-supplied allocator for the result vector. The B0/enable_if template parameter
  // (declared elsewhere) restricts PipelineAllocator to allocators whose value_type is
  // UniqueHandle<Pipeline, Dispatch>.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          PipelineAllocator & pipelineAllocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Raw handles are created first into a default-allocated scratch vector.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    // ePipelineCompileRequiredEXT is accepted as a success code, so callers must inspect the Result.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // Wrap each raw handle in a UniqueHandle; all share one ObjectDestroy deleter built from this
    // device and the same allocation callbacks that were used for creation.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Enhanced-mode convenience wrapper creating exactly one compute pipeline (count == 1) and
  // wrapping it in a UniqueHandle whose ObjectDestroy deleter destroys it through this device
  // with the same allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                         const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateComputePipelines(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    // ePipelineCompileRequiredEXT is accepted as a success code, so callers must inspect the Result.
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2705. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2706. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2707. template <typename Dispatch>
  2708. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2709. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2710. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2711. {
  2712. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2713. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2714. }
  2715. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2716. template <typename Dispatch>
  2717. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2718. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2719. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2720. {
  2721. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2722. d.vkDestroyPipeline( m_device,
  2723. static_cast<VkPipeline>( pipeline ),
  2724. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2725. }
  2726. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2727. template <typename Dispatch>
  2728. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2729. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2730. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2731. {
  2732. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2733. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2734. }
  2735. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2736. template <typename Dispatch>
  2737. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  2738. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2739. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2740. {
  2741. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2742. d.vkDestroyPipeline( m_device,
  2743. static_cast<VkPipeline>( pipeline ),
  2744. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2745. }
  2746. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2747. template <typename Dispatch>
  2748. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo,
  2749. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2750. VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout,
  2751. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2752. {
  2753. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2754. return static_cast<Result>( d.vkCreatePipelineLayout( m_device,
  2755. reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ),
  2756. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2757. reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
  2758. }
  2759. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode pipeline layout creation: non-success results are rejected via resultCheck
  // and the created PipelineLayout is returned through ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type
    Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    VkResult result =
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
  }
  2776. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode pipeline layout creation returning an owning UniqueHandle whose ObjectDestroy
  // deleter destroys the layout through this device with the same allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type
    Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
    VkResult result =
      d.vkCreatePipelineLayout( m_device,
                                reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2795. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2796. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2797. template <typename Dispatch>
  2798. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2799. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2800. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2801. {
  2802. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2803. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2804. }
  2805. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2806. template <typename Dispatch>
  2807. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2808. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2809. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2810. {
  2811. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2812. d.vkDestroyPipelineLayout( m_device,
  2813. static_cast<VkPipelineLayout>( pipelineLayout ),
  2814. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2815. }
  2816. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2817. template <typename Dispatch>
  2818. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2819. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2820. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2821. {
  2822. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2823. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2824. }
  2825. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2826. template <typename Dispatch>
  2827. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout,
  2828. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2829. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2830. {
  2831. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2832. d.vkDestroyPipelineLayout( m_device,
  2833. static_cast<VkPipelineLayout>( pipelineLayout ),
  2834. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2835. }
  2836. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2837. template <typename Dispatch>
  2838. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo,
  2839. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2840. VULKAN_HPP_NAMESPACE::Sampler * pSampler,
  2841. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2842. {
  2843. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2844. return static_cast<Result>( d.vkCreateSampler( m_device,
  2845. reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ),
  2846. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2847. reinterpret_cast<VkSampler *>( pSampler ) ) );
  2848. }
  2849. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode sampler creation: non-success results are rejected via resultCheck and the
  // created Sampler is returned through ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler(
    const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Sampler sampler;
    VkResult result =
      d.vkCreateSampler( m_device,
                         reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkSampler *>( &sampler ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
  }
  2864. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode sampler creation returning an owning UniqueHandle whose ObjectDestroy deleter
  // destroys the sampler through this device with the same allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique(
    const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Sampler sampler;
    VkResult result =
      d.vkCreateSampler( m_device,
                         reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ),
                         reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                         reinterpret_cast<VkSampler *>( &sampler ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2880. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2881. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2882. template <typename Dispatch>
  2883. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2884. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2885. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2886. {
  2887. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2888. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2889. }
  2890. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2891. template <typename Dispatch>
  2892. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2893. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2894. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2895. {
  2896. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2897. d.vkDestroySampler( m_device,
  2898. static_cast<VkSampler>( sampler ),
  2899. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2900. }
  2901. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2902. template <typename Dispatch>
  2903. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2904. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2905. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2906. {
  2907. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2908. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2909. }
  2910. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  2911. template <typename Dispatch>
  2912. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler,
  2913. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2914. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2915. {
  2916. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2917. d.vkDestroySampler( m_device,
  2918. static_cast<VkSampler>( sampler ),
  2919. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2920. }
  2921. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  2922. template <typename Dispatch>
  2923. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
  2924. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2925. VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout,
  2926. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2927. {
  2928. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2929. return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device,
  2930. reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ),
  2931. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  2932. reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
  2933. }
  2934. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode descriptor set layout creation: non-success results are rejected via
  // resultCheck and the created DescriptorSetLayout is returned through ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type
    Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    VkResult result = d.vkCreateDescriptorSetLayout(
      m_device,
      reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
  }
  2951. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Enhanced-mode descriptor set layout creation returning an owning UniqueHandle whose
  // ObjectDestroy deleter destroys the layout through this device with the same allocation
  // callbacks.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type
    Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
    VkResult result = d.vkCreateDescriptorSetLayout(
      m_device,
      reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  2970. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  2971. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- VkDescriptorSetLayout destruction -------------------------------------
  // Four overloads forwarding to vkDestroyDescriptorSetLayout: explicit names vs.
  // generic Device::destroy, each with a raw pAllocator pointer or (enhanced mode)
  // an Optional<AllocationCallbacks> wrapper that is converted back to a pointer.
  2972. template <typename Dispatch>
  // Pointer-style overload; pAllocator is passed through unchanged (may be null).
  2973. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2974. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2975. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2976. {
  // Guard: the dispatcher must have been built against the same Vulkan header version.
  2977. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2978. d.vkDestroyDescriptorSetLayout(
  2979. m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  2980. }
  2981. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: Optional<> allocator converts to a possibly-null pointer.
  2982. template <typename Dispatch>
  2983. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2984. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  2985. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2986. {
  2987. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  2988. d.vkDestroyDescriptorSetLayout(
  2989. m_device,
  2990. static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
  // Optional -> AllocationCallbacks* (static_cast), then to the C struct pointer.
  2991. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  2992. }
  2993. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy overload selected by argument type; same body as above.
  2994. template <typename Dispatch>
  2995. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  2996. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  2997. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  2998. {
  2999. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3000. d.vkDestroyDescriptorSetLayout(
  3001. m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3002. }
  3003. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy, enhanced-mode variant.
  3004. template <typename Dispatch>
  3005. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout,
  3006. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3007. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3008. {
  3009. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3010. d.vkDestroyDescriptorSetLayout(
  3011. m_device,
  3012. static_cast<VkDescriptorSetLayout>( descriptorSetLayout ),
  3013. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3014. }
  3015. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkCreateDescriptorPool wrappers ---------------------------------------
  // Raw pointer form returns Result; enhanced forms throw/return via
  // createResultValueType, and the Unique form attaches an ObjectDestroy deleter.
  3016. template <typename Dispatch>
  // C-style passthrough: caller supplies all out-parameters, Result returned as-is.
  3017. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo,
  3018. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3019. VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool,
  3020. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3021. {
  3022. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3023. return static_cast<Result>( d.vkCreateDescriptorPool( m_device,
  3024. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ),
  3025. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3026. reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
  3027. }
  3028. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced form: creates the pool locally, checks the result (resultCheck may
  // throw depending on configuration), and returns the handle by value.
  3029. template <typename Dispatch>
  3030. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type
  3031. Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
  3032. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3033. Dispatch const & d ) const
  3034. {
  3035. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3036. VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
  3037. VkResult result =
  3038. d.vkCreateDescriptorPool( m_device,
  3039. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
  3040. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3041. reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
  3042. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
  3043. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
  3044. }
  3045. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique form: identical call, but wraps the handle in a UniqueHandle whose
  // deleter (ObjectDestroy) captures this device, the allocator and dispatcher.
  3046. template <typename Dispatch>
  3047. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type
  3048. Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo,
  3049. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3050. Dispatch const & d ) const
  3051. {
  3052. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3053. VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
  3054. VkResult result =
  3055. d.vkCreateDescriptorPool( m_device,
  3056. reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ),
  3057. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3058. reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
  3059. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
  3060. return createResultValueType(
  3061. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3062. UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3063. }
  3064. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3065. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- VkDescriptorPool destruction ------------------------------------------
  // Same four-overload pattern as the other handle types: named vs. generic
  // destroy, raw pointer vs. Optional allocator, all forwarding to
  // vkDestroyDescriptorPool.
  3066. template <typename Dispatch>
  3067. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3068. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3069. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3070. {
  3071. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3072. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3073. }
  3074. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant with Optional allocator.
  3075. template <typename Dispatch>
  3076. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3077. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3078. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3079. {
  3080. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3081. d.vkDestroyDescriptorPool( m_device,
  3082. static_cast<VkDescriptorPool>( descriptorPool ),
  3083. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3084. }
  3085. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy overload, raw pointer allocator.
  3086. template <typename Dispatch>
  3087. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3088. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3089. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3090. {
  3091. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3092. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3093. }
  3094. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload, enhanced-mode variant.
  3095. template <typename Dispatch>
  3096. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3097. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3098. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3099. {
  3100. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3101. d.vkDestroyDescriptorPool( m_device,
  3102. static_cast<VkDescriptorPool>( descriptorPool ),
  3103. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3104. }
  3105. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkResetDescriptorPool wrapper -----------------------------------------
  // Two mutually exclusive compile-time variants: with enhanced mode disabled the
  // raw Result is returned to the caller; with enhanced mode enabled the result is
  // discarded and the wrapper returns void.
  3106. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3107. template <typename Dispatch>
  3108. VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3109. VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
  3110. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3111. {
  3112. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3113. return static_cast<Result>(
  3114. d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
  3115. }
  3116. #else
  // Enhanced-mode variant: same call, return value intentionally ignored.
  3117. template <typename Dispatch>
  3118. VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3119. VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags,
  3120. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3121. {
  3122. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3123. d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
  3124. }
  3125. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // --- vkAllocateDescriptorSets wrappers -------------------------------------
  // Five variants: raw pointer passthrough; enhanced vector-returning form (with
  // optional custom vector allocator); and two Unique forms that wrap each set in
  // a UniqueHandle sharing one PoolFree deleter bound to allocateInfo.descriptorPool.
  3126. template <typename Dispatch>
  // Raw passthrough: caller provides the output array, Result returned unchecked.
  3127. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo,
  3128. VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3129. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3130. {
  3131. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3132. return static_cast<Result>( d.vkAllocateDescriptorSets(
  3133. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
  3134. }
  3135. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced form: sizes the output vector from allocateInfo.descriptorSetCount
  // and returns it after checking the result.
  3136. template <typename DescriptorSetAllocator, typename Dispatch>
  3137. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
  3138. Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  3139. {
  3140. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3141. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
  3142. VkResult result = d.vkAllocateDescriptorSets(
  3143. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3144. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  3145. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
  3146. }
  // Variant taking a caller-supplied vector allocator (SFINAE-gated so it only
  // participates when B0 is a DescriptorSet allocator type).
  3147. template <typename DescriptorSetAllocator,
  3148. typename Dispatch,
  3149. typename B0,
  3150. typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
  3151. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type
  3152. Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
  3153. DescriptorSetAllocator & descriptorSetAllocator,
  3154. Dispatch const & d ) const
  3155. {
  3156. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3157. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
  3158. VkResult result = d.vkAllocateDescriptorSets(
  3159. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3160. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  3161. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
  3162. }
  3163. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique form: allocates into a plain vector, then wraps every handle with a
  // shared PoolFree deleter so each set is returned to its pool on destruction.
  3164. template <typename Dispatch, typename DescriptorSetAllocator>
  3165. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3166. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
  3167. Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  3168. {
  3169. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3170. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  3171. VkResult result = d.vkAllocateDescriptorSets(
  3172. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3173. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  3174. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
  // reserve() avoids reallocation while wrapping; one deleter is shared by all handles.
  3175. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  3176. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  3177. for ( auto const & descriptorSet : descriptorSets )
  3178. {
  3179. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
  3180. }
  3181. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
  3182. }
  // Unique form with a caller-supplied allocator for the UniqueHandle vector
  // (SFINAE-gated on B0's value_type being UniqueHandle<DescriptorSet, Dispatch>).
  3183. template <typename Dispatch,
  3184. typename DescriptorSetAllocator,
  3185. typename B0,
  3186. typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
  3187. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  3188. typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type
  3189. Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo,
  3190. DescriptorSetAllocator & descriptorSetAllocator,
  3191. Dispatch const & d ) const
  3192. {
  3193. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3194. std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  3195. VkResult result = d.vkAllocateDescriptorSets(
  3196. m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
  3197. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  3198. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
  3199. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  3200. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  3201. for ( auto const & descriptorSet : descriptorSets )
  3202. {
  3203. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
  3204. }
  3205. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
  3206. }
  3207. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3208. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkFreeDescriptorSets wrappers -----------------------------------------
  // Named freeDescriptorSets plus the parenthesized (Device::free) spelling (the
  // parentheses keep a macro named `free` from expanding here), each in a raw
  // count+pointer form and an ArrayProxy enhanced form. The enhanced forms return
  // void, discarding the VkResult.
  3209. template <typename Dispatch>
  3210. VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3211. uint32_t descriptorSetCount,
  3212. const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3213. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3214. {
  3215. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3216. return static_cast<Result>( d.vkFreeDescriptorSets(
  3217. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  3218. }
  3219. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy form: count and pointer taken from the proxy; result discarded.
  3220. template <typename Dispatch>
  3221. VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3222. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
  3223. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3224. {
  3225. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3226. d.vkFreeDescriptorSets(
  3227. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
  3228. }
  3229. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // (Device::free) alias, raw count+pointer form.
  3230. template <typename Dispatch>
  3231. VULKAN_HPP_INLINE Result( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3232. uint32_t descriptorSetCount,
  3233. const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3234. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3235. {
  3236. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3237. return static_cast<Result>( d.vkFreeDescriptorSets(
  3238. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  3239. }
  3240. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // (Device::free) alias, ArrayProxy form.
  3241. template <typename Dispatch>
  3242. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool,
  3243. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
  3244. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3245. {
  3246. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3247. d.vkFreeDescriptorSets(
  3248. m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
  3249. }
  3250. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkUpdateDescriptorSets wrappers ---------------------------------------
  // Raw count+pointer form and ArrayProxy form; both forward writes and copies
  // to the C entry point with straight pointer reinterprets.
  3251. template <typename Dispatch>
  3252. VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount,
  3253. const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
  3254. uint32_t descriptorCopyCount,
  3255. const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies,
  3256. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3257. {
  3258. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3259. d.vkUpdateDescriptorSets( m_device,
  3260. descriptorWriteCount,
  3261. reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ),
  3262. descriptorCopyCount,
  3263. reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  3264. }
  3265. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // ArrayProxy form: sizes and pointers come from the proxies (either may be empty).
  3266. template <typename Dispatch>
  3267. VULKAN_HPP_INLINE void
  3268. Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
  3269. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies,
  3270. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3271. {
  3272. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3273. d.vkUpdateDescriptorSets( m_device,
  3274. descriptorWrites.size(),
  3275. reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ),
  3276. descriptorCopies.size(),
  3277. reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  3278. }
  3279. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkCreateFramebuffer wrappers ------------------------------------------
  // Same trio as the other create calls: raw pointer passthrough, enhanced
  // value-returning form, and Unique form with an ObjectDestroy deleter.
  3280. template <typename Dispatch>
  3281. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo,
  3282. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3283. VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer,
  3284. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3285. {
  3286. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3287. return static_cast<Result>( d.vkCreateFramebuffer( m_device,
  3288. reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ),
  3289. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3290. reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
  3291. }
  3292. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced form: result checked via resultCheck, handle returned by value.
  3293. template <typename Dispatch>
  3294. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type
  3295. Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
  3296. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3297. Dispatch const & d ) const
  3298. {
  3299. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3300. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
  3301. VkResult result =
  3302. d.vkCreateFramebuffer( m_device,
  3303. reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
  3304. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3305. reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
  3306. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
  3307. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
  3308. }
  3309. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique form: handle wrapped with ObjectDestroy(*this, allocator, d).
  3310. template <typename Dispatch>
  3311. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type
  3312. Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo,
  3313. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3314. Dispatch const & d ) const
  3315. {
  3316. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3317. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
  3318. VkResult result =
  3319. d.vkCreateFramebuffer( m_device,
  3320. reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ),
  3321. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3322. reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
  3323. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
  3324. return createResultValueType(
  3325. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3326. UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3327. }
  3328. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3329. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- VkFramebuffer destruction ---------------------------------------------
  // Named and generic destroy overloads, raw pointer vs. Optional allocator, all
  // forwarding to vkDestroyFramebuffer.
  3330. template <typename Dispatch>
  3331. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3332. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3333. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3334. {
  3335. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3336. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3337. }
  3338. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant with Optional allocator.
  3339. template <typename Dispatch>
  3340. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3341. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3342. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3343. {
  3344. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3345. d.vkDestroyFramebuffer( m_device,
  3346. static_cast<VkFramebuffer>( framebuffer ),
  3347. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3348. }
  3349. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy overload, raw pointer allocator.
  3350. template <typename Dispatch>
  3351. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3352. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3353. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3354. {
  3355. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3356. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3357. }
  3358. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload, enhanced-mode variant.
  3359. template <typename Dispatch>
  3360. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  3361. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3362. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3363. {
  3364. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3365. d.vkDestroyFramebuffer( m_device,
  3366. static_cast<VkFramebuffer>( framebuffer ),
  3367. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3368. }
  3369. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkCreateRenderPass wrappers -------------------------------------------
  // Raw pointer passthrough, enhanced value-returning form, and Unique form with
  // an ObjectDestroy deleter — identical pattern to the other create wrappers.
  3370. template <typename Dispatch>
  3371. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo,
  3372. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3373. VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
  3374. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3375. {
  3376. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3377. return static_cast<Result>( d.vkCreateRenderPass( m_device,
  3378. reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ),
  3379. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3380. reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  3381. }
  3382. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced form: result checked, handle returned by value.
  3383. template <typename Dispatch>
  3384. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
  3385. Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
  3386. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3387. Dispatch const & d ) const
  3388. {
  3389. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3390. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  3391. VkResult result =
  3392. d.vkCreateRenderPass( m_device,
  3393. reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
  3394. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3395. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  3396. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
  3397. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  3398. }
  3399. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique form: handle wrapped with ObjectDestroy(*this, allocator, d).
  3400. template <typename Dispatch>
  3401. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
  3402. Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo,
  3403. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3404. Dispatch const & d ) const
  3405. {
  3406. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3407. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  3408. VkResult result =
  3409. d.vkCreateRenderPass( m_device,
  3410. reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ),
  3411. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  3412. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  3413. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
  3414. return createResultValueType(
  3415. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  3416. UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  3417. }
  3418. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3419. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- VkRenderPass destruction ----------------------------------------------
  // Named and generic destroy overloads, raw pointer vs. Optional allocator, all
  // forwarding to vkDestroyRenderPass.
  3420. template <typename Dispatch>
  3421. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3422. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3423. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3424. {
  3425. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3426. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3427. }
  3428. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant with Optional allocator.
  3429. template <typename Dispatch>
  3430. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3431. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3432. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3433. {
  3434. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3435. d.vkDestroyRenderPass( m_device,
  3436. static_cast<VkRenderPass>( renderPass ),
  3437. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3438. }
  3439. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy overload, raw pointer allocator.
  3440. template <typename Dispatch>
  3441. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3442. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3443. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3444. {
  3445. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3446. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3447. }
  3448. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy overload, enhanced-mode variant.
  3449. template <typename Dispatch>
  3450. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3451. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3452. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3453. {
  3454. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3455. d.vkDestroyRenderPass( m_device,
  3456. static_cast<VkRenderPass>( renderPass ),
  3457. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3458. }
  3459. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // --- vkGetRenderAreaGranularity wrappers -----------------------------------
  // Raw out-parameter form, and an enhanced form that returns the Extent2D by value.
  3460. template <typename Dispatch>
  3461. VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3462. VULKAN_HPP_NAMESPACE::Extent2D * pGranularity,
  3463. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3464. {
  3465. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3466. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  3467. }
  3468. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced form: local Extent2D filled by the C call and returned by value.
  3469. template <typename Dispatch>
  3470. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass,
  3471. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3472. {
  3473. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3474. VULKAN_HPP_NAMESPACE::Extent2D granularity;
  3475. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
  3476. return granularity;
  3477. }
  3478. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3479. template <typename Dispatch>
  3480. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo,
  3481. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3482. VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool,
  3483. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3484. {
  3485. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3486. return static_cast<Result>( d.vkCreateCommandPool( m_device,
  3487. reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ),
  3488. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  3489. reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
  3490. }
  3491. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper for vkCreateCommandPool: creates a command pool from
  // 'createInfo', with 'allocator' optionally supplying host allocation callbacks.
  // Failures are routed through resultCheck(); on success the new handle is
  // returned inside ResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type
    Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VkResult result =
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
  }
  3508. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Like createCommandPool, but wraps the new handle in a UniqueHandle whose
  // ObjectDestroy deleter destroys the pool (with the same allocator) when the
  // handle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type
    Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
    VkResult result =
      d.vkCreateCommandPool( m_device,
                             reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCommandPool *>( &commandPool ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  3527. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3528. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3529. template <typename Dispatch>
  3530. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3531. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3532. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3533. {
  3534. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3535. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3536. }
  3537. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3538. template <typename Dispatch>
  3539. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3540. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3541. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3542. {
  3543. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3544. d.vkDestroyCommandPool( m_device,
  3545. static_cast<VkCommandPool>( commandPool ),
  3546. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3547. }
  3548. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3549. template <typename Dispatch>
  3550. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3551. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  3552. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3553. {
  3554. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3555. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  3556. }
  3557. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3558. template <typename Dispatch>
  3559. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3560. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  3561. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3562. {
  3563. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3564. d.vkDestroyCommandPool( m_device,
  3565. static_cast<VkCommandPool>( commandPool ),
  3566. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  3567. }
  3568. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3569. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3570. template <typename Dispatch>
  3571. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3572. VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags,
  3573. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3574. {
  3575. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3576. return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  3577. }
  3578. #else
  3579. template <typename Dispatch>
  3580. VULKAN_HPP_INLINE typename ResultValueType<void>::type
  3581. Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
  3582. {
  3583. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3584. VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
  3585. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
  3586. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  3587. }
  3588. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3589. template <typename Dispatch>
  3590. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo,
  3591. VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3592. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3593. {
  3594. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3595. return static_cast<Result>( d.vkAllocateCommandBuffers(
  3596. m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
  3597. }
  3598. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: allocates allocateInfo.commandBufferCount command
  // buffers and returns them in a std::vector (element allocator selectable via
  // the CommandBufferAllocator template parameter).
  template <typename CommandBufferAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Pre-size the vector so the C API can write the handles in place.
    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
    VkResult result = d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
  }
  // Overload taking an explicit vector allocator instance; B0's enable_if
  // restricts it to allocators whose value_type is CommandBuffer.
  template <typename CommandBufferAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type
    Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                    CommandBufferAllocator & commandBufferAllocator,
                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Pre-size the vector (using the caller's allocator) so the C API can fill it.
    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
    VkResult result = d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
  }
  3626. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Allocates command buffers and wraps each handle in a UniqueHandle whose
  // PoolFree deleter returns it to allocateInfo.commandPool on destruction.
  template <typename Dispatch, typename CommandBufferAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Raw handles are collected first, then adopted one by one below.
    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VkResult result = d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    // All unique handles share one deleter bound to the source pool.
    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
  }
  // Variant of allocateCommandBuffersUnique taking an explicit allocator for the
  // result vector; B0's enable_if restricts it to allocators of UniqueHandle<CommandBuffer>.
  template <typename Dispatch,
            typename CommandBufferAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type
    Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo,
                                          CommandBufferAllocator & commandBufferAllocator,
                                          Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Raw handles are collected first, then adopted one by one below.
    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
    VkResult result = d.vkAllocateCommandBuffers(
      m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
    // All unique handles share one deleter bound to the source pool.
    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
    for ( auto const & commandBuffer : commandBuffers )
    {
      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
  }
  3670. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  3671. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3672. template <typename Dispatch>
  3673. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3674. uint32_t commandBufferCount,
  3675. const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3676. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3677. {
  3678. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3679. d.vkFreeCommandBuffers(
  3680. m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  3681. }
  3682. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3683. template <typename Dispatch>
  3684. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3685. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
  3686. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3687. {
  3688. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3689. d.vkFreeCommandBuffers(
  3690. m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  3691. }
  3692. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3693. template <typename Dispatch>
  3694. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3695. uint32_t commandBufferCount,
  3696. const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  3697. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3698. {
  3699. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3700. d.vkFreeCommandBuffers(
  3701. m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  3702. }
  3703. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3704. template <typename Dispatch>
  3705. VULKAN_HPP_INLINE void( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  3706. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
  3707. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3708. {
  3709. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3710. d.vkFreeCommandBuffers(
  3711. m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  3712. }
  3713. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3714. template <typename Dispatch>
  3715. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo,
  3716. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3717. {
  3718. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3719. return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  3720. }
  3721. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3722. template <typename Dispatch>
  3723. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  3724. CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  3725. {
  3726. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3727. VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
  3728. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
  3729. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  3730. }
  3731. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3732. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Finish recording the command buffer; returns the raw result code.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
  }
  3739. #else
  3740. template <typename Dispatch>
  3741. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
  3742. {
  3743. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3744. VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
  3745. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
  3746. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  3747. }
  3748. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3749. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Reset the command buffer to the initial state; returns the raw result code.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
  }
  3757. #else
  3758. template <typename Dispatch>
  3759. VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
  3760. {
  3761. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3762. VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
  3763. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
  3764. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  3765. }
  3766. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  3767. template <typename Dispatch>
  3768. VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  3769. VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  3770. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3771. {
  3772. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3773. d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  3774. }
  3775. template <typename Dispatch>
  3776. VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
  3777. uint32_t viewportCount,
  3778. const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
  3779. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3780. {
  3781. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3782. d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  3783. }
  3784. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3785. template <typename Dispatch>
  3786. VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport,
  3787. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
  3788. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3789. {
  3790. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3791. d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  3792. }
  3793. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3794. template <typename Dispatch>
  3795. VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
  3796. uint32_t scissorCount,
  3797. const VULKAN_HPP_NAMESPACE::Rect2D * pScissors,
  3798. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3799. {
  3800. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3801. d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  3802. }
  3803. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3804. template <typename Dispatch>
  3805. VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor,
  3806. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
  3807. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3808. {
  3809. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3810. d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  3811. }
  3812. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Set the dynamic line-width state for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
  }
  // Set the dynamic depth-bias state (constant factor, clamp, slope factor).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
  }
  // Set the four dynamic blend-constant components (RGBA).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
  }
  // Set the dynamic depth-bounds test range [minDepthBounds, maxDepthBounds].
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
  }
  // Set the dynamic stencil compare mask for the faces selected by 'faceMask'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  }
  // Set the dynamic stencil write mask for the faces selected by 'faceMask'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  }
  // Set the dynamic stencil reference value for the faces selected by 'faceMask'.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  }
  3859. template <typename Dispatch>
  3860. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  3861. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  3862. uint32_t firstSet,
  3863. uint32_t descriptorSetCount,
  3864. const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets,
  3865. uint32_t dynamicOffsetCount,
  3866. const uint32_t * pDynamicOffsets,
  3867. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3868. {
  3869. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3870. d.vkCmdBindDescriptorSets( m_commandBuffer,
  3871. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  3872. static_cast<VkPipelineLayout>( layout ),
  3873. firstSet,
  3874. descriptorSetCount,
  3875. reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ),
  3876. dynamicOffsetCount,
  3877. pDynamicOffsets );
  3878. }
  3879. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  3880. template <typename Dispatch>
  3881. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  3882. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  3883. uint32_t firstSet,
  3884. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets,
  3885. VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets,
  3886. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3887. {
  3888. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3889. d.vkCmdBindDescriptorSets( m_commandBuffer,
  3890. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  3891. static_cast<VkPipelineLayout>( layout ),
  3892. firstSet,
  3893. descriptorSets.size(),
  3894. reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ),
  3895. dynamicOffsets.size(),
  3896. dynamicOffsets.data() );
  3897. }
  3898. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  3899. template <typename Dispatch>
  3900. VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer,
  3901. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  3902. VULKAN_HPP_NAMESPACE::IndexType indexType,
  3903. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3904. {
  3905. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3906. d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
  3907. }
  3908. template <typename Dispatch>
  3909. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
  3910. uint32_t bindingCount,
  3911. const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
  3912. const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
  3913. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3914. {
  3915. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3916. d.vkCmdBindVertexBuffers(
  3917. m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  3918. }
  3919. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: binds the buffers/offsets pairs from two ArrayProxies.
  // The two proxies must be the same length; with exceptions disabled this is
  // checked by assert, otherwise a LogicError is thrown.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
#  else
    if ( buffers.size() != offsets.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBindVertexBuffers( m_commandBuffer,
                              firstBinding,
                              buffers.size(),
                              reinterpret_cast<const VkBuffer *>( buffers.data() ),
                              reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  }
  3941. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Record a non-indexed draw of 'vertexCount' vertices and 'instanceCount' instances.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::draw(
    uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  }
  // Record an indexed draw; 'vertexOffset' is added to each index before lookup.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount,
                                                     uint32_t instanceCount,
                                                     uint32_t firstIndex,
                                                     int32_t vertexOffset,
                                                     uint32_t firstInstance,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  }
  3960. template <typename Dispatch>
  3961. VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
  3962. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  3963. uint32_t drawCount,
  3964. uint32_t stride,
  3965. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3966. {
  3967. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3968. d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  3969. }
  3970. template <typename Dispatch>
  3971. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
  3972. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  3973. uint32_t drawCount,
  3974. uint32_t stride,
  3975. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3976. {
  3977. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3978. d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  3979. }
  // Record a compute dispatch of groupCountX x groupCountY x groupCountZ workgroups.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }
  3987. template <typename Dispatch>
  3988. VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer,
  3989. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  3990. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  3991. {
  3992. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  3993. d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  3994. }
  3995. template <typename Dispatch>
  3996. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
  3997. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  3998. uint32_t regionCount,
  3999. const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions,
  4000. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4001. {
  4002. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4003. d.vkCmdCopyBuffer( m_commandBuffer,
  4004. static_cast<VkBuffer>( srcBuffer ),
  4005. static_cast<VkBuffer>( dstBuffer ),
  4006. regionCount,
  4007. reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  4008. }
  4009. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4010. template <typename Dispatch>
  4011. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
  4012. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4013. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions,
  4014. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4015. {
  4016. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4017. d.vkCmdCopyBuffer( m_commandBuffer,
  4018. static_cast<VkBuffer>( srcBuffer ),
  4019. static_cast<VkBuffer>( dstBuffer ),
  4020. regions.size(),
  4021. reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  4022. }
  4023. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4024. template <typename Dispatch>
  4025. VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4026. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4027. VULKAN_HPP_NAMESPACE::Image dstImage,
  4028. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4029. uint32_t regionCount,
  4030. const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions,
  4031. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4032. {
  4033. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4034. d.vkCmdCopyImage( m_commandBuffer,
  4035. static_cast<VkImage>( srcImage ),
  4036. static_cast<VkImageLayout>( srcImageLayout ),
  4037. static_cast<VkImage>( dstImage ),
  4038. static_cast<VkImageLayout>( dstImageLayout ),
  4039. regionCount,
  4040. reinterpret_cast<const VkImageCopy *>( pRegions ) );
  4041. }
  4042. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4043. template <typename Dispatch>
  4044. VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4045. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4046. VULKAN_HPP_NAMESPACE::Image dstImage,
  4047. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4048. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions,
  4049. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4050. {
  4051. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4052. d.vkCmdCopyImage( m_commandBuffer,
  4053. static_cast<VkImage>( srcImage ),
  4054. static_cast<VkImageLayout>( srcImageLayout ),
  4055. static_cast<VkImage>( dstImage ),
  4056. static_cast<VkImageLayout>( dstImageLayout ),
  4057. regions.size(),
  4058. reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  4059. }
  4060. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4061. template <typename Dispatch>
  4062. VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4063. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4064. VULKAN_HPP_NAMESPACE::Image dstImage,
  4065. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4066. uint32_t regionCount,
  4067. const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions,
  4068. VULKAN_HPP_NAMESPACE::Filter filter,
  4069. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4070. {
  4071. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4072. d.vkCmdBlitImage( m_commandBuffer,
  4073. static_cast<VkImage>( srcImage ),
  4074. static_cast<VkImageLayout>( srcImageLayout ),
  4075. static_cast<VkImage>( dstImage ),
  4076. static_cast<VkImageLayout>( dstImageLayout ),
  4077. regionCount,
  4078. reinterpret_cast<const VkImageBlit *>( pRegions ),
  4079. static_cast<VkFilter>( filter ) );
  4080. }
  4081. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4082. template <typename Dispatch>
  4083. VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4084. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4085. VULKAN_HPP_NAMESPACE::Image dstImage,
  4086. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4087. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions,
  4088. VULKAN_HPP_NAMESPACE::Filter filter,
  4089. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4090. {
  4091. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4092. d.vkCmdBlitImage( m_commandBuffer,
  4093. static_cast<VkImage>( srcImage ),
  4094. static_cast<VkImageLayout>( srcImageLayout ),
  4095. static_cast<VkImage>( dstImage ),
  4096. static_cast<VkImageLayout>( dstImageLayout ),
  4097. regions.size(),
  4098. reinterpret_cast<const VkImageBlit *>( regions.data() ),
  4099. static_cast<VkFilter>( filter ) );
  4100. }
  4101. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4102. template <typename Dispatch>
  4103. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
  4104. VULKAN_HPP_NAMESPACE::Image dstImage,
  4105. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4106. uint32_t regionCount,
  4107. const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
  4108. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4109. {
  4110. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4111. d.vkCmdCopyBufferToImage( m_commandBuffer,
  4112. static_cast<VkBuffer>( srcBuffer ),
  4113. static_cast<VkImage>( dstImage ),
  4114. static_cast<VkImageLayout>( dstImageLayout ),
  4115. regionCount,
  4116. reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  4117. }
  4118. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4119. template <typename Dispatch>
  4120. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer,
  4121. VULKAN_HPP_NAMESPACE::Image dstImage,
  4122. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4123. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
  4124. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4125. {
  4126. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4127. d.vkCmdCopyBufferToImage( m_commandBuffer,
  4128. static_cast<VkBuffer>( srcBuffer ),
  4129. static_cast<VkImage>( dstImage ),
  4130. static_cast<VkImageLayout>( dstImageLayout ),
  4131. regions.size(),
  4132. reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  4133. }
  4134. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4135. template <typename Dispatch>
  4136. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
  4137. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4138. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4139. uint32_t regionCount,
  4140. const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions,
  4141. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4142. {
  4143. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4144. d.vkCmdCopyImageToBuffer( m_commandBuffer,
  4145. static_cast<VkImage>( srcImage ),
  4146. static_cast<VkImageLayout>( srcImageLayout ),
  4147. static_cast<VkBuffer>( dstBuffer ),
  4148. regionCount,
  4149. reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  4150. }
  4151. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4152. template <typename Dispatch>
  4153. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage,
  4154. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4155. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4156. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions,
  4157. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4158. {
  4159. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4160. d.vkCmdCopyImageToBuffer( m_commandBuffer,
  4161. static_cast<VkImage>( srcImage ),
  4162. static_cast<VkImageLayout>( srcImageLayout ),
  4163. static_cast<VkBuffer>( dstBuffer ),
  4164. regions.size(),
  4165. reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  4166. }
  4167. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4168. template <typename Dispatch>
  4169. VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4170. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  4171. VULKAN_HPP_NAMESPACE::DeviceSize dataSize,
  4172. const void * pData,
  4173. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4174. {
  4175. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4176. d.vkCmdUpdateBuffer(
  4177. m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
  4178. }
  4179. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4180. template <typename DataType, typename Dispatch>
  4181. VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4182. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  4183. VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data,
  4184. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4185. {
  4186. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4187. d.vkCmdUpdateBuffer( m_commandBuffer,
  4188. static_cast<VkBuffer>( dstBuffer ),
  4189. static_cast<VkDeviceSize>( dstOffset ),
  4190. data.size() * sizeof( DataType ),
  4191. reinterpret_cast<const void *>( data.data() ) );
  4192. }
  4193. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4194. template <typename Dispatch>
  4195. VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4196. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  4197. VULKAN_HPP_NAMESPACE::DeviceSize size,
  4198. uint32_t data,
  4199. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4200. {
  4201. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4202. d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
  4203. }
  4204. template <typename Dispatch>
  4205. VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
  4206. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  4207. const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor,
  4208. uint32_t rangeCount,
  4209. const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
  4210. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4211. {
  4212. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4213. d.vkCmdClearColorImage( m_commandBuffer,
  4214. static_cast<VkImage>( image ),
  4215. static_cast<VkImageLayout>( imageLayout ),
  4216. reinterpret_cast<const VkClearColorValue *>( pColor ),
  4217. rangeCount,
  4218. reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  4219. }
  4220. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4221. template <typename Dispatch>
  4222. VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image,
  4223. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  4224. const VULKAN_HPP_NAMESPACE::ClearColorValue & color,
  4225. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
  4226. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4227. {
  4228. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4229. d.vkCmdClearColorImage( m_commandBuffer,
  4230. static_cast<VkImage>( image ),
  4231. static_cast<VkImageLayout>( imageLayout ),
  4232. reinterpret_cast<const VkClearColorValue *>( &color ),
  4233. ranges.size(),
  4234. reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  4235. }
  4236. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4237. template <typename Dispatch>
  4238. VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
  4239. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  4240. const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil,
  4241. uint32_t rangeCount,
  4242. const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges,
  4243. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4244. {
  4245. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4246. d.vkCmdClearDepthStencilImage( m_commandBuffer,
  4247. static_cast<VkImage>( image ),
  4248. static_cast<VkImageLayout>( imageLayout ),
  4249. reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ),
  4250. rangeCount,
  4251. reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  4252. }
  4253. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4254. template <typename Dispatch>
  4255. VULKAN_HPP_INLINE void
  4256. CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image,
  4257. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  4258. const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil,
  4259. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges,
  4260. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4261. {
  4262. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4263. d.vkCmdClearDepthStencilImage( m_commandBuffer,
  4264. static_cast<VkImage>( image ),
  4265. static_cast<VkImageLayout>( imageLayout ),
  4266. reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ),
  4267. ranges.size(),
  4268. reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  4269. }
  4270. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4271. template <typename Dispatch>
  4272. VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount,
  4273. const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments,
  4274. uint32_t rectCount,
  4275. const VULKAN_HPP_NAMESPACE::ClearRect * pRects,
  4276. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4277. {
  4278. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4279. d.vkCmdClearAttachments( m_commandBuffer,
  4280. attachmentCount,
  4281. reinterpret_cast<const VkClearAttachment *>( pAttachments ),
  4282. rectCount,
  4283. reinterpret_cast<const VkClearRect *>( pRects ) );
  4284. }
  4285. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4286. template <typename Dispatch>
  4287. VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments,
  4288. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects,
  4289. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4290. {
  4291. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4292. d.vkCmdClearAttachments( m_commandBuffer,
  4293. attachments.size(),
  4294. reinterpret_cast<const VkClearAttachment *>( attachments.data() ),
  4295. rects.size(),
  4296. reinterpret_cast<const VkClearRect *>( rects.data() ) );
  4297. }
  4298. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4299. template <typename Dispatch>
  4300. VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4301. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4302. VULKAN_HPP_NAMESPACE::Image dstImage,
  4303. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4304. uint32_t regionCount,
  4305. const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions,
  4306. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4307. {
  4308. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4309. d.vkCmdResolveImage( m_commandBuffer,
  4310. static_cast<VkImage>( srcImage ),
  4311. static_cast<VkImageLayout>( srcImageLayout ),
  4312. static_cast<VkImage>( dstImage ),
  4313. static_cast<VkImageLayout>( dstImageLayout ),
  4314. regionCount,
  4315. reinterpret_cast<const VkImageResolve *>( pRegions ) );
  4316. }
  4317. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4318. template <typename Dispatch>
  4319. VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage,
  4320. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout,
  4321. VULKAN_HPP_NAMESPACE::Image dstImage,
  4322. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  4323. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions,
  4324. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4325. {
  4326. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4327. d.vkCmdResolveImage( m_commandBuffer,
  4328. static_cast<VkImage>( srcImage ),
  4329. static_cast<VkImageLayout>( srcImageLayout ),
  4330. static_cast<VkImage>( dstImage ),
  4331. static_cast<VkImageLayout>( dstImageLayout ),
  4332. regions.size(),
  4333. reinterpret_cast<const VkImageResolve *>( regions.data() ) );
  4334. }
  4335. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4336. template <typename Dispatch>
  4337. VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event,
  4338. VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
  4339. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4340. {
  4341. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4342. d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  4343. }
  4344. template <typename Dispatch>
  4345. VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event,
  4346. VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask,
  4347. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4348. {
  4349. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4350. d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  4351. }
  4352. template <typename Dispatch>
  4353. VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount,
  4354. const VULKAN_HPP_NAMESPACE::Event * pEvents,
  4355. VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
  4356. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
  4357. uint32_t memoryBarrierCount,
  4358. const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
  4359. uint32_t bufferMemoryBarrierCount,
  4360. const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
  4361. uint32_t imageMemoryBarrierCount,
  4362. const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
  4363. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4364. {
  4365. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4366. d.vkCmdWaitEvents( m_commandBuffer,
  4367. eventCount,
  4368. reinterpret_cast<const VkEvent *>( pEvents ),
  4369. static_cast<VkPipelineStageFlags>( srcStageMask ),
  4370. static_cast<VkPipelineStageFlags>( dstStageMask ),
  4371. memoryBarrierCount,
  4372. reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
  4373. bufferMemoryBarrierCount,
  4374. reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
  4375. imageMemoryBarrierCount,
  4376. reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  4377. }
  4378. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4379. template <typename Dispatch>
  4380. VULKAN_HPP_INLINE void
  4381. CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
  4382. VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
  4383. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
  4384. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
  4385. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
  4386. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
  4387. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4388. {
  4389. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4390. d.vkCmdWaitEvents( m_commandBuffer,
  4391. events.size(),
  4392. reinterpret_cast<const VkEvent *>( events.data() ),
  4393. static_cast<VkPipelineStageFlags>( srcStageMask ),
  4394. static_cast<VkPipelineStageFlags>( dstStageMask ),
  4395. memoryBarriers.size(),
  4396. reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
  4397. bufferMemoryBarriers.size(),
  4398. reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
  4399. imageMemoryBarriers.size(),
  4400. reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  4401. }
  4402. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4403. template <typename Dispatch>
  4404. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
  4405. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
  4406. VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
  4407. uint32_t memoryBarrierCount,
  4408. const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers,
  4409. uint32_t bufferMemoryBarrierCount,
  4410. const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers,
  4411. uint32_t imageMemoryBarrierCount,
  4412. const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers,
  4413. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4414. {
  4415. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4416. d.vkCmdPipelineBarrier( m_commandBuffer,
  4417. static_cast<VkPipelineStageFlags>( srcStageMask ),
  4418. static_cast<VkPipelineStageFlags>( dstStageMask ),
  4419. static_cast<VkDependencyFlags>( dependencyFlags ),
  4420. memoryBarrierCount,
  4421. reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ),
  4422. bufferMemoryBarrierCount,
  4423. reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ),
  4424. imageMemoryBarrierCount,
  4425. reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  4426. }
  4427. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4428. template <typename Dispatch>
  4429. VULKAN_HPP_INLINE void
  4430. CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask,
  4431. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask,
  4432. VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags,
  4433. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers,
  4434. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers,
  4435. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers,
  4436. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4437. {
  4438. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4439. d.vkCmdPipelineBarrier( m_commandBuffer,
  4440. static_cast<VkPipelineStageFlags>( srcStageMask ),
  4441. static_cast<VkPipelineStageFlags>( dstStageMask ),
  4442. static_cast<VkDependencyFlags>( dependencyFlags ),
  4443. memoryBarriers.size(),
  4444. reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ),
  4445. bufferMemoryBarriers.size(),
  4446. reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ),
  4447. imageMemoryBarriers.size(),
  4448. reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  4449. }
  4450. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4451. template <typename Dispatch>
  4452. VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  4453. uint32_t query,
  4454. VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
  4455. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4456. {
  4457. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4458. d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
  4459. }
  4460. template <typename Dispatch>
  4461. VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4462. {
  4463. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4464. d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
  4465. }
  4466. template <typename Dispatch>
  4467. VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  4468. uint32_t firstQuery,
  4469. uint32_t queryCount,
  4470. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4471. {
  4472. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4473. d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  4474. }
  4475. template <typename Dispatch>
  4476. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
  4477. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  4478. uint32_t query,
  4479. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4480. {
  4481. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4482. d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
  4483. }
  4484. template <typename Dispatch>
  4485. VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  4486. uint32_t firstQuery,
  4487. uint32_t queryCount,
  4488. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  4489. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  4490. VULKAN_HPP_NAMESPACE::DeviceSize stride,
  4491. VULKAN_HPP_NAMESPACE::QueryResultFlags flags,
  4492. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4493. {
  4494. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4495. d.vkCmdCopyQueryPoolResults( m_commandBuffer,
  4496. static_cast<VkQueryPool>( queryPool ),
  4497. firstQuery,
  4498. queryCount,
  4499. static_cast<VkBuffer>( dstBuffer ),
  4500. static_cast<VkDeviceSize>( dstOffset ),
  4501. static_cast<VkDeviceSize>( stride ),
  4502. static_cast<VkQueryResultFlags>( flags ) );
  4503. }
  4504. template <typename Dispatch>
  4505. VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  4506. VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
  4507. uint32_t offset,
  4508. uint32_t size,
  4509. const void * pValues,
  4510. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4511. {
  4512. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4513. d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
  4514. }
  4515. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4516. template <typename ValuesType, typename Dispatch>
  4517. VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  4518. VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags,
  4519. uint32_t offset,
  4520. VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values,
  4521. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4522. {
  4523. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4524. d.vkCmdPushConstants( m_commandBuffer,
  4525. static_cast<VkPipelineLayout>( layout ),
  4526. static_cast<VkShaderStageFlags>( stageFlags ),
  4527. offset,
  4528. values.size() * sizeof( ValuesType ),
  4529. reinterpret_cast<const void *>( values.data() ) );
  4530. }
  4531. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4532. template <typename Dispatch>
  4533. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
  4534. VULKAN_HPP_NAMESPACE::SubpassContents contents,
  4535. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4536. {
  4537. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4538. d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
  4539. }
  4540. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4541. template <typename Dispatch>
  4542. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
  4543. VULKAN_HPP_NAMESPACE::SubpassContents contents,
  4544. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4545. {
  4546. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4547. d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
  4548. }
  4549. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  4550. template <typename Dispatch>
  4551. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4552. {
  4553. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4554. d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  4555. }
  // Records the end of the current render pass instance; takes no parameters beyond the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Guard against a dispatcher built against a different Vulkan header than this one.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderPass( m_commandBuffer );
  }
  4562. template <typename Dispatch>
  4563. VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount,
  4564. const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers,
  4565. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4566. {
  4567. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4568. d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  4569. }
  4570. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  4571. template <typename Dispatch>
  4572. VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers,
  4573. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  4574. {
  4575. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  4576. d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  4577. }
  4578. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_VERSION_1_1 ===

  // Raw overload: writes the supported instance-level API version into *pApiVersion and
  // returns the VkResult unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the API version by value; the VkResult is routed through
  // resultCheck / createResultValueType rather than being returned to the caller directly.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint32_t apiVersion;
    VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkBindBufferMemory2: binds memory to buffers as described by the
  // pBindInfos array; returns the VkResult unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t                                           bindInfoCount,
                                                                           const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
                                                                           Dispatch const &                                   d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy in, result validated via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkBindImageMemory2: binds memory to images as described by the
  // pBindInfos array; returns the VkResult unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t                                          bindInfoCount,
                                                                          const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
                                                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy in, result validated via resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetDeviceGroupPeerMemoryFeatures: writes the peer-memory feature
  // flags for the given heap / device-index pair into *pPeerMemoryFeatures.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t                                       heapIndex,
                                                             uint32_t                                       localDeviceIndex,
                                                             uint32_t                                       remoteDeviceIndex,
                                                             VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
                                                             Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceGroupPeerMemoryFeatures(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the flags by value (the call itself cannot fail, so no result check).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures(
    uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
    d.vkGetDeviceGroupPeerMemoryFeatures(
      m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
    return peerMemoryFeatures;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Thin wrapper over vkCmdSetDeviceMask: records a device-mask update into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  }
  // Thin wrapper over vkCmdDispatchBase: records a compute dispatch with a non-zero base
  // workgroup, forwarding all six coordinates unchanged.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t         baseGroupX,
                                                      uint32_t         baseGroupY,
                                                      uint32_t         baseGroupZ,
                                                      uint32_t         groupCountX,
                                                      uint32_t         groupCountY,
                                                      uint32_t         groupCountZ,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
  // Raw overload of vkEnumeratePhysicalDeviceGroups: standard Vulkan two-call
  // count/fill protocol, result returned unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::enumeratePhysicalDeviceGroups( uint32_t *                                            pPhysicalDeviceGroupCount,
                                             VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                             Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups(
      m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a vector of group properties. Queries the count, resizes,
  // fetches, and retries the whole sequence while the driver reports VK_INCOMPLETE
  // (i.e. the set of groups changed between the two calls).
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t                                                                                                 physicalDeviceGroupCount;
    VkResult                                                                                                 result;
    do
    {
      // First call: count only (nullptr data pointer).
      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        // Second call: fill the vector; may come back VK_INCOMPLETE and trigger a retry.
        result = d.vkEnumeratePhysicalDeviceGroups(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    // Shrink in case the final count is smaller than the capacity we allocated.
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }

  // Same as above, but constructs the result vector with a caller-supplied allocator.
  // The enable_if constraint (defaulted in the declaration) restricts B1 to vectors of
  // PhysicalDeviceGroupProperties.
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = d.vkEnumeratePhysicalDeviceGroups(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetImageMemoryRequirements2: writes the memory requirements for the
  // image described by *pInfo into *pMemoryRequirements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                                              VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                pMemoryRequirements,
                                                              Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  // StructureChain overload: fills the MemoryRequirements2 element of a caller-specified
  // pNext chain, so extension structures chained behind it get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetBufferMemoryRequirements2: writes the memory requirements for the
  // buffer described by *pInfo into *pMemoryRequirements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                                               VULKAN_HPP_NAMESPACE::MemoryRequirements2 *                 pMemoryRequirements,
                                                               Dispatch const &                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetBufferMemoryRequirements2(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the MemoryRequirements2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }

  // StructureChain overload: fills the MemoryRequirements2 element of a caller-specified
  // pNext chain, so extension structures chained behind it get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 &      memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetImageSparseMemoryRequirements2: standard two-call count/fill
  // protocol over the caller's count and array pointers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
                                                                    uint32_t *                                                       pSparseMemoryRequirementCount,
                                                                    VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 *           pSparseMemoryRequirements,
                                                                    Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
                                           pSparseMemoryRequirementCount,
                                           reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a vector of sparse-image memory requirements. Unlike the
  // enumerate* wrappers there is no retry loop: this query returns void, so a single
  // count + fill pass suffices.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t                                                                                                   sparseMemoryRequirementCount;
    // Count query (nullptr data pointer), then fill.
    d.vkGetImageSparseMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the second call reported fewer entries than the first.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }

  // Same as above, but constructs the result vector with a caller-supplied allocator.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
                                               SparseImageMemoryRequirements2Allocator &                        sparseImageMemoryRequirements2Allocator,
                                               Dispatch const &                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    d.vkGetImageSparseMemoryRequirements2(
      m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    d.vkGetImageSparseMemoryRequirements2( m_device,
                                           reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                           &sparseMemoryRequirementCount,
                                           reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetPhysicalDeviceFeatures2: writes the device features into *pFeatures.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the PhysicalDeviceFeatures2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
    PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }

  // StructureChain overload: fills the PhysicalDeviceFeatures2 element of a caller-specified
  // pNext chain, so chained extension feature structs get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 &  features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetPhysicalDeviceProperties2: writes the device properties into *pProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
                                                         Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the PhysicalDeviceProperties2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
    PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return properties;
  }

  // StructureChain overload: fills the PhysicalDeviceProperties2 element of a caller-specified
  // pNext chain, so chained extension property structs get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>  structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetPhysicalDeviceFormatProperties2: writes the properties of the given
  // format into *pFormatProperties.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format              format,
                                                               VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
                                                               Dispatch const &                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the FormatProperties2 by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }

  // StructureChain overload: fills the FormatProperties2 element of a caller-specified
  // pNext chain, so chained extension structs get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 &        formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetPhysicalDeviceImageFormatProperties2: writes the image-format
  // capabilities matching *pImageFormatInfo into *pImageFormatProperties; result unchecked.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
                                               VULKAN_HPP_NAMESPACE::ImageFormatProperties2 *               pImageFormatProperties,
                                               Dispatch const &                                             d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
                                                                             reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
                                                                             reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the ImageFormatProperties2 by value; the VkResult is
  // validated through resultCheck before the value is handed back.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
                                                                   reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                   reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }

  // StructureChain overload: fills the ImageFormatProperties2 element of a caller-specified
  // pNext chain and returns the whole chain after checking the result.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                     structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice,
                                                                   reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                   reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload of vkGetPhysicalDeviceQueueFamilyProperties2: standard two-call
  // count/fill protocol over the caller's count and array pointers.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t *                                    pQueueFamilyPropertyCount,
                                                                    VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
                                                                    Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns a vector of QueueFamilyProperties2 via count + fill
  // (the query returns void, so no retry loop is needed).
  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    // Shrink if the second call reported fewer queue families than the first.
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // Same as above, but constructs the result vector with a caller-supplied allocator.
  template <typename QueueFamilyProperties2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
    PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
    uint32_t                                                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      queueFamilyProperties.resize( queueFamilyPropertyCount );
    }
    return queueFamilyProperties;
  }

  // StructureChain overload: returns one StructureChain per queue family. A temporary flat
  // vector of QueueFamilyProperties2 is queried; before the fill call each temporary's pNext
  // is wired to the corresponding chain's extension structs, and afterwards the head structs
  // are copied back into the chains.
  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<StructureChain, StructureChainAllocator>       structureChains;
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>  queueFamilyProperties;
    uint32_t                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      // Borrow each chain's pNext so the driver fills the chained extension structs in place.
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      // Copy the filled head struct back into its chain.
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }

  // StructureChain overload with a caller-supplied allocator for the chain vector;
  // otherwise identical to the overload above.
  template <typename StructureChain,
            typename StructureChainAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
    PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<StructureChain, StructureChainAllocator>       structureChains( structureChainAllocator );
    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>  queueFamilyProperties;
    uint32_t                                                   queueFamilyPropertyCount;
    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
    structureChains.resize( queueFamilyPropertyCount );
    queueFamilyProperties.resize( queueFamilyPropertyCount );
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
    }
    d.vkGetPhysicalDeviceQueueFamilyProperties2(
      m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
    {
      structureChains.resize( queueFamilyPropertyCount );
    }
    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
    {
      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
    }
    return structureChains;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5104. template <typename Dispatch>
  5105. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
  5106. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5107. {
  5108. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5109. d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  5110. }
  5111. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5112. template <typename Dispatch>
  5113. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  5114. PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5115. {
  5116. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5117. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  5118. d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  5119. return memoryProperties;
  5120. }
  5121. template <typename X, typename Y, typename... Z, typename Dispatch>
  5122. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  5123. PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5124. {
  5125. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5126. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  5127. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
  5128. structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
  5129. d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  5130. return structureChain;
  5131. }
  5132. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5133. template <typename Dispatch>
  5134. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
  5135. uint32_t * pPropertyCount,
  5136. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
  5137. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5138. {
  5139. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5140. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
  5141. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
  5142. pPropertyCount,
  5143. reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  5144. }
  5145. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5146. template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  5147. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  5148. PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  5149. {
  5150. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5151. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
  5152. uint32_t propertyCount;
  5153. d.vkGetPhysicalDeviceSparseImageFormatProperties2(
  5154. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  5155. properties.resize( propertyCount );
  5156. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
  5157. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  5158. &propertyCount,
  5159. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  5160. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  5161. if ( propertyCount < properties.size() )
  5162. {
  5163. properties.resize( propertyCount );
  5164. }
  5165. return properties;
  5166. }
  5167. template <typename SparseImageFormatProperties2Allocator,
  5168. typename Dispatch,
  5169. typename B1,
  5170. typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
  5171. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  5172. PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
  5173. SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator,
  5174. Dispatch const & d ) const
  5175. {
  5176. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5177. std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
  5178. uint32_t propertyCount;
  5179. d.vkGetPhysicalDeviceSparseImageFormatProperties2(
  5180. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  5181. properties.resize( propertyCount );
  5182. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice,
  5183. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
  5184. &propertyCount,
  5185. reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  5186. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  5187. if ( propertyCount < properties.size() )
  5188. {
  5189. properties.resize( propertyCount );
  5190. }
  5191. return properties;
  5192. }
  5193. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5194. template <typename Dispatch>
  5195. VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool,
  5196. VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
  5197. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5198. {
  5199. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5200. d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  5201. }
  5202. template <typename Dispatch>
  5203. VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo,
  5204. VULKAN_HPP_NAMESPACE::Queue * pQueue,
  5205. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5206. {
  5207. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5208. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
  5209. }
  5210. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5211. template <typename Dispatch>
  5212. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo,
  5213. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5214. {
  5215. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5216. VULKAN_HPP_NAMESPACE::Queue queue;
  5217. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
  5218. return queue;
  5219. }
  5220. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5221. template <typename Dispatch>
  5222. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  5223. Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
  5224. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5225. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
  5226. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5227. {
  5228. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5229. return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device,
  5230. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
  5231. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5232. reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  5233. }
  5234. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5235. template <typename Dispatch>
  5236. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
  5237. Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  5238. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5239. Dispatch const & d ) const
  5240. {
  5241. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5242. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  5243. VkResult result = d.vkCreateSamplerYcbcrConversion(
  5244. m_device,
  5245. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  5246. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5247. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  5248. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
  5249. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
  5250. }
  5251. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  5252. template <typename Dispatch>
  5253. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
  5254. Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  5255. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5256. Dispatch const & d ) const
  5257. {
  5258. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5259. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  5260. VkResult result = d.vkCreateSamplerYcbcrConversion(
  5261. m_device,
  5262. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  5263. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5264. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  5265. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
  5266. return createResultValueType(
  5267. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5268. UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  5269. }
  5270. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  5271. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5272. template <typename Dispatch>
  5273. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5274. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5275. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5276. {
  5277. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5278. d.vkDestroySamplerYcbcrConversion(
  5279. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5280. }
  5281. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5282. template <typename Dispatch>
  5283. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5284. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5285. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5286. {
  5287. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5288. d.vkDestroySamplerYcbcrConversion(
  5289. m_device,
  5290. static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
  5291. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5292. }
  5293. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5294. template <typename Dispatch>
  5295. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5296. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5297. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5298. {
  5299. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5300. d.vkDestroySamplerYcbcrConversion(
  5301. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5302. }
  5303. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5304. template <typename Dispatch>
  5305. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  5306. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5307. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5308. {
  5309. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5310. d.vkDestroySamplerYcbcrConversion(
  5311. m_device,
  5312. static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
  5313. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5314. }
  5315. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5316. template <typename Dispatch>
  5317. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  5318. Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
  5319. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5320. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate,
  5321. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5322. {
  5323. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5324. return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device,
  5325. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
  5326. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5327. reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  5328. }
  5329. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5330. template <typename Dispatch>
  5331. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
  5332. Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
  5333. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5334. Dispatch const & d ) const
  5335. {
  5336. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5337. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
  5338. VkResult result = d.vkCreateDescriptorUpdateTemplate(
  5339. m_device,
  5340. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
  5341. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5342. reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
  5343. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
  5344. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
  5345. }
  5346. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  5347. template <typename Dispatch>
  5348. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
  5349. Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
  5350. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5351. Dispatch const & d ) const
  5352. {
  5353. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5354. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
  5355. VkResult result = d.vkCreateDescriptorUpdateTemplate(
  5356. m_device,
  5357. reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
  5358. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5359. reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
  5360. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
  5361. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5362. UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
  5363. descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  5364. }
  5365. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  5366. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5367. template <typename Dispatch>
  5368. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5369. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5370. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5371. {
  5372. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5373. d.vkDestroyDescriptorUpdateTemplate(
  5374. m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5375. }
  5376. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5377. template <typename Dispatch>
  5378. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5379. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5380. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5381. {
  5382. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5383. d.vkDestroyDescriptorUpdateTemplate(
  5384. m_device,
  5385. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  5386. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5387. }
  5388. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5389. template <typename Dispatch>
  5390. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5391. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5392. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5393. {
  5394. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5395. d.vkDestroyDescriptorUpdateTemplate(
  5396. m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5397. }
  5398. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5399. template <typename Dispatch>
  5400. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5401. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5402. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5403. {
  5404. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5405. d.vkDestroyDescriptorUpdateTemplate(
  5406. m_device,
  5407. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  5408. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5409. }
  5410. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5411. template <typename Dispatch>
  5412. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  5413. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5414. const void * pData,
  5415. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5416. {
  5417. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5418. d.vkUpdateDescriptorSetWithTemplate(
  5419. m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  5420. }
  5421. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5422. template <typename DataType, typename Dispatch>
  5423. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  5424. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  5425. DataType const & data,
  5426. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5427. {
  5428. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5429. d.vkUpdateDescriptorSetWithTemplate( m_device,
  5430. static_cast<VkDescriptorSet>( descriptorSet ),
  5431. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  5432. reinterpret_cast<const void *>( &data ) );
  5433. }
  5434. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5435. template <typename Dispatch>
  5436. VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
  5437. VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties,
  5438. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5439. {
  5440. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5441. d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
  5442. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
  5443. reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  5444. }
  5445. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5446. template <typename Dispatch>
  5447. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
  5448. PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
  5449. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5450. {
  5451. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5452. VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
  5453. d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice,
  5454. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
  5455. reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
  5456. return externalBufferProperties;
  5457. }
  5458. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5459. template <typename Dispatch>
  5460. VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
  5461. VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties,
  5462. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5463. {
  5464. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5465. d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
  5466. reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
  5467. reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  5468. }
  5469. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5470. template <typename Dispatch>
  5471. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
  5472. PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
  5473. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5474. {
  5475. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5476. VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
  5477. d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice,
  5478. reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
  5479. reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
  5480. return externalFenceProperties;
  5481. }
  5482. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5483. template <typename Dispatch>
  5484. VULKAN_HPP_INLINE void
  5485. PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
  5486. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties,
  5487. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5488. {
  5489. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5490. d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
  5491. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
  5492. reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  5493. }
  5494. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5495. template <typename Dispatch>
  5496. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
  5497. PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
  5498. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5499. {
  5500. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5501. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
  5502. d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice,
  5503. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
  5504. reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
  5505. return externalSemaphoreProperties;
  5506. }
  5507. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5508. template <typename Dispatch>
  5509. VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
  5510. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
  5511. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5512. {
  5513. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5514. d.vkGetDescriptorSetLayoutSupport(
  5515. m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  5516. }
  5517. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5518. template <typename Dispatch>
  5519. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
  5520. Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
  5521. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5522. {
  5523. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5524. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
  5525. d.vkGetDescriptorSetLayoutSupport(
  5526. m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  5527. return support;
  5528. }
  5529. template <typename X, typename Y, typename... Z, typename Dispatch>
  5530. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  5531. Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
  5532. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5533. {
  5534. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5535. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  5536. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
  5537. d.vkGetDescriptorSetLayoutSupport(
  5538. m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  5539. return structureChain;
  5540. }
  5541. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5542. //=== VK_VERSION_1_2 ===
  5543. template <typename Dispatch>
  5544. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
  5545. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  5546. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  5547. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  5548. uint32_t maxDrawCount,
  5549. uint32_t stride,
  5550. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5551. {
  5552. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5553. d.vkCmdDrawIndirectCount( m_commandBuffer,
  5554. static_cast<VkBuffer>( buffer ),
  5555. static_cast<VkDeviceSize>( offset ),
  5556. static_cast<VkBuffer>( countBuffer ),
  5557. static_cast<VkDeviceSize>( countBufferOffset ),
  5558. maxDrawCount,
  5559. stride );
  5560. }
  5561. template <typename Dispatch>
  5562. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer,
  5563. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  5564. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  5565. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  5566. uint32_t maxDrawCount,
  5567. uint32_t stride,
  5568. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5569. {
  5570. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5571. d.vkCmdDrawIndexedIndirectCount( m_commandBuffer,
  5572. static_cast<VkBuffer>( buffer ),
  5573. static_cast<VkDeviceSize>( offset ),
  5574. static_cast<VkBuffer>( countBuffer ),
  5575. static_cast<VkDeviceSize>( countBufferOffset ),
  5576. maxDrawCount,
  5577. stride );
  5578. }
  5579. template <typename Dispatch>
  5580. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
  5581. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5582. VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass,
  5583. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5584. {
  5585. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5586. return static_cast<Result>( d.vkCreateRenderPass2( m_device,
  5587. reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
  5588. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5589. reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  5590. }
  5591. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5592. template <typename Dispatch>
  5593. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
  5594. Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
  5595. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5596. Dispatch const & d ) const
  5597. {
  5598. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5599. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  5600. VkResult result =
  5601. d.vkCreateRenderPass2( m_device,
  5602. reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
  5603. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5604. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  5605. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
  5606. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  5607. }
  5608. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  5609. template <typename Dispatch>
  5610. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
  5611. Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
  5612. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5613. Dispatch const & d ) const
  5614. {
  5615. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5616. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  5617. VkResult result =
  5618. d.vkCreateRenderPass2( m_device,
  5619. reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
  5620. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5621. reinterpret_cast<VkRenderPass *>( &renderPass ) );
  5622. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
  5623. return createResultValueType(
  5624. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5625. UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  5626. }
  5627. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  5628. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5629. template <typename Dispatch>
  5630. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
  5631. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
  5632. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5633. {
  5634. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5635. d.vkCmdBeginRenderPass2(
  5636. m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  5637. }
  5638. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5639. template <typename Dispatch>
  5640. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
  5641. const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  5642. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5643. {
  5644. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5645. d.vkCmdBeginRenderPass2(
  5646. m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  5647. }
  5648. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5649. template <typename Dispatch>
  5650. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
  5651. const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
  5652. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5653. {
  5654. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5655. d.vkCmdNextSubpass2(
  5656. m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  5657. }
  5658. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5659. template <typename Dispatch>
  5660. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
  5661. const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
  5662. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5663. {
  5664. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5665. d.vkCmdNextSubpass2(
  5666. m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  5667. }
  5668. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5669. template <typename Dispatch>
  5670. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
  5671. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5672. {
  5673. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5674. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  5675. }
  5676. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5677. template <typename Dispatch>
  5678. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
  5679. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5680. {
  5681. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5682. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  5683. }
  5684. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5685. template <typename Dispatch>
  5686. VULKAN_HPP_INLINE void
  5687. Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5688. {
  5689. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5690. d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  5691. }
  5692. template <typename Dispatch>
  5693. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  5694. uint64_t * pValue,
  5695. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5696. {
  5697. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5698. return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  5699. }
  5700. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5701. template <typename Dispatch>
  5702. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
  5703. Dispatch const & d ) const
  5704. {
  5705. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5706. uint64_t value;
  5707. VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
  5708. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
  5709. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  5710. }
  5711. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5712. template <typename Dispatch>
  5713. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
  5714. uint64_t timeout,
  5715. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5716. {
  5717. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5718. return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  5719. }
  5720. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5721. template <typename Dispatch>
  5722. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  5723. Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  5724. {
  5725. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5726. VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
  5727. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5728. VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores",
  5729. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  5730. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  5731. }
  5732. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5733. template <typename Dispatch>
  5734. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
  5735. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5736. {
  5737. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5738. return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  5739. }
  5740. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5741. template <typename Dispatch>
  5742. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  5743. Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  5744. {
  5745. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5746. VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
  5747. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
  5748. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  5749. }
  5750. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5751. template <typename Dispatch>
  5752. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  5753. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5754. {
  5755. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5756. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  5757. }
  5758. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5759. template <typename Dispatch>
  5760. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  5761. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5762. {
  5763. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5764. VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  5765. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  5766. }
  5767. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5768. template <typename Dispatch>
  5769. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  5770. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5771. {
  5772. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5773. return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  5774. }
  5775. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5776. template <typename Dispatch>
  5777. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  5778. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5779. {
  5780. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5781. uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  5782. return result;
  5783. }
  5784. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5785. template <typename Dispatch>
  5786. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
  5787. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5788. {
  5789. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5790. return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  5791. }
  5792. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5793. template <typename Dispatch>
  5794. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
  5795. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5796. {
  5797. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5798. uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  5799. return result;
  5800. }
  5801. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5802. //=== VK_VERSION_1_3 ===
  5803. template <typename Dispatch>
  5804. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount,
  5805. VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
  5806. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5807. {
  5808. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5809. return static_cast<Result>(
  5810. d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
  5811. }
  5812. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5813. template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
  5814. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  5815. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
  5816. PhysicalDevice::getToolProperties( Dispatch const & d ) const
  5817. {
  5818. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5819. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
  5820. uint32_t toolCount;
  5821. VkResult result;
  5822. do
  5823. {
  5824. result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
  5825. if ( ( result == VK_SUCCESS ) && toolCount )
  5826. {
  5827. toolProperties.resize( toolCount );
  5828. result =
  5829. d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
  5830. }
  5831. } while ( result == VK_INCOMPLETE );
  5832. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
  5833. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  5834. if ( toolCount < toolProperties.size() )
  5835. {
  5836. toolProperties.resize( toolCount );
  5837. }
  5838. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  5839. }
  5840. template <typename PhysicalDeviceToolPropertiesAllocator,
  5841. typename Dispatch,
  5842. typename B1,
  5843. typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
  5844. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  5845. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
  5846. PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
  5847. {
  5848. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5849. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
  5850. physicalDeviceToolPropertiesAllocator );
  5851. uint32_t toolCount;
  5852. VkResult result;
  5853. do
  5854. {
  5855. result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
  5856. if ( ( result == VK_SUCCESS ) && toolCount )
  5857. {
  5858. toolProperties.resize( toolCount );
  5859. result =
  5860. d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
  5861. }
  5862. } while ( result == VK_INCOMPLETE );
  5863. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
  5864. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  5865. if ( toolCount < toolProperties.size() )
  5866. {
  5867. toolProperties.resize( toolCount );
  5868. }
  5869. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  5870. }
  5871. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5872. template <typename Dispatch>
  5873. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
  5874. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5875. VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot,
  5876. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5877. {
  5878. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5879. return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device,
  5880. reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
  5881. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  5882. reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
  5883. }
  5884. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5885. template <typename Dispatch>
  5886. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
  5887. Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
  5888. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5889. Dispatch const & d ) const
  5890. {
  5891. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5892. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
  5893. VkResult result =
  5894. d.vkCreatePrivateDataSlot( m_device,
  5895. reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
  5896. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5897. reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
  5898. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" );
  5899. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
  5900. }
  5901. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  5902. template <typename Dispatch>
  5903. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
  5904. Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo,
  5905. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5906. Dispatch const & d ) const
  5907. {
  5908. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5909. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
  5910. VkResult result =
  5911. d.vkCreatePrivateDataSlot( m_device,
  5912. reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
  5913. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  5914. reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
  5915. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" );
  5916. return createResultValueType(
  5917. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  5918. UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  5919. }
  5920. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  5921. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5922. template <typename Dispatch>
  5923. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5924. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5925. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5926. {
  5927. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5928. d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5929. }
  5930. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5931. template <typename Dispatch>
  5932. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5933. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5934. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5935. {
  5936. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5937. d.vkDestroyPrivateDataSlot(
  5938. m_device,
  5939. static_cast<VkPrivateDataSlot>( privateDataSlot ),
  5940. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5941. }
  5942. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5943. template <typename Dispatch>
  5944. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5945. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  5946. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5947. {
  5948. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5949. d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  5950. }
  5951. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5952. template <typename Dispatch>
  5953. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5954. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  5955. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5956. {
  5957. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5958. d.vkDestroyPrivateDataSlot(
  5959. m_device,
  5960. static_cast<VkPrivateDataSlot>( privateDataSlot ),
  5961. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  5962. }
  5963. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  5964. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  5965. template <typename Dispatch>
  5966. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  5967. uint64_t objectHandle,
  5968. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5969. uint64_t data,
  5970. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5971. {
  5972. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5973. return static_cast<Result>(
  5974. d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
  5975. }
  5976. #else
  5977. template <typename Dispatch>
  5978. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  5979. uint64_t objectHandle,
  5980. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5981. uint64_t data,
  5982. Dispatch const & d ) const
  5983. {
  5984. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5985. VkResult result =
  5986. d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
  5987. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" );
  5988. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  5989. }
  5990. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  5991. template <typename Dispatch>
  5992. VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  5993. uint64_t objectHandle,
  5994. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  5995. uint64_t * pData,
  5996. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  5997. {
  5998. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  5999. d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
  6000. }
  6001. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6002. template <typename Dispatch>
  6003. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  6004. uint64_t objectHandle,
  6005. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  6006. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6007. {
  6008. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6009. uint64_t data;
  6010. d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
  6011. return data;
  6012. }
  6013. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  6014. template <typename Dispatch>
  6015. VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
  6016. const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
  6017. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6018. {
  6019. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6020. d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  6021. }
  6022. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6023. template <typename Dispatch>
  6024. VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event,
  6025. const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
  6026. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6027. {
  6028. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6029. d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  6030. }
  6031. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  6032. template <typename Dispatch>
  6033. VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event,
  6034. VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
  6035. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6036. {
  6037. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6038. d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  6039. }
  6040. template <typename Dispatch>
  6041. VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount,
  6042. const VULKAN_HPP_NAMESPACE::Event * pEvents,
  6043. const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
  6044. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6045. {
  6046. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6047. d.vkCmdWaitEvents2(
  6048. m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
  6049. }
  6050. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6051. template <typename Dispatch>
  6052. VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
  6053. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
  6054. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  6055. {
  6056. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6057. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  6058. VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
  6059. # else
  6060. if ( events.size() != dependencyInfos.size() )
  6061. {
  6062. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" );
  6063. }
  6064. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  6065. d.vkCmdWaitEvents2( m_commandBuffer,
  6066. events.size(),
  6067. reinterpret_cast<const VkEvent *>( events.data() ),
  6068. reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  6069. }
  6070. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  6071. template <typename Dispatch>
  6072. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
  6073. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6074. {
  6075. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6076. d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  6077. }
  6078. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6079. template <typename Dispatch>
  6080. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
  6081. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6082. {
  6083. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6084. d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  6085. }
  6086. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  6087. template <typename Dispatch>
  6088. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  6089. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  6090. uint32_t query,
  6091. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6092. {
  6093. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6094. d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  6095. }
  6096. template <typename Dispatch>
  6097. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount,
  6098. const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
  6099. VULKAN_HPP_NAMESPACE::Fence fence,
  6100. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6101. {
  6102. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6103. return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  6104. }
  6105. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: submits an ArrayProxy of SubmitInfo2 batches; resultCheck turns a
  // failure VkResult into the configured error handling (exception or assert, per build settings).
  6106. template <typename Dispatch>
  6107. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2(
  6108. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  6109. {
  6110. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6111. VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
  6112. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" );
  6113. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  6114. }
  6115. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records a buffer-to-buffer copy described by *pCopyBufferInfo
  // (vkCmdCopyBuffer2).
  6116. template <typename Dispatch>
  6117. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
  6118. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6119. {
  6120. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6121. d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
  6122. }
  6123. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes CopyBufferInfo2 by reference.
  6124. template <typename Dispatch>
  6125. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
  6126. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6127. {
  6128. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6129. d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  6130. }
  6131. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records an image-to-image copy described by *pCopyImageInfo
  // (vkCmdCopyImage2).
  6132. template <typename Dispatch>
  6133. VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6134. {
  6135. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6136. d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
  6137. }
  6138. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes CopyImageInfo2 by reference.
  6139. template <typename Dispatch>
  6140. VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6141. {
  6142. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6143. d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  6144. }
  6145. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records a buffer-to-image copy described by *pCopyBufferToImageInfo
  // (vkCmdCopyBufferToImage2).
  6146. template <typename Dispatch>
  6147. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
  6148. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6149. {
  6150. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6151. d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
  6152. }
  6153. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes CopyBufferToImageInfo2 by reference.
  6154. template <typename Dispatch>
  6155. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
  6156. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6157. {
  6158. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6159. d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  6160. }
  6161. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records an image-to-buffer copy described by *pCopyImageToBufferInfo
  // (vkCmdCopyImageToBuffer2).
  6162. template <typename Dispatch>
  6163. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
  6164. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6165. {
  6166. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6167. d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
  6168. }
  6169. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes CopyImageToBufferInfo2 by reference.
  6170. template <typename Dispatch>
  6171. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
  6172. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6173. {
  6174. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6175. d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  6176. }
  6177. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records an image blit (scaled/filtered copy) described by *pBlitImageInfo
  // (vkCmdBlitImage2).
  6178. template <typename Dispatch>
  6179. VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6180. {
  6181. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6182. d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
  6183. }
  6184. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes BlitImageInfo2 by reference.
  6185. template <typename Dispatch>
  6186. VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6187. {
  6188. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6189. d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  6190. }
  6191. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records a multisample resolve described by *pResolveImageInfo
  // (vkCmdResolveImage2).
  6192. template <typename Dispatch>
  6193. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
  6194. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6195. {
  6196. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6197. d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
  6198. }
  6199. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes ResolveImageInfo2 by reference.
  6200. template <typename Dispatch>
  6201. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
  6202. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6203. {
  6204. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6205. d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  6206. }
  6207. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: begins a dynamic-rendering pass described by *pRenderingInfo
  // (vkCmdBeginRendering); pair with endRendering().
  6208. template <typename Dispatch>
  6209. VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
  6210. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6211. {
  6212. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6213. d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
  6214. }
  6215. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same as the pointer variant but takes RenderingInfo by reference.
  6216. template <typename Dispatch>
  6217. VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
  6218. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6219. {
  6220. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6221. d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
  6222. }
  6223. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Ends the dynamic-rendering pass begun with beginRendering (vkCmdEndRendering).
  6224. template <typename Dispatch>
  6225. VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6226. {
  6227. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6228. d.vkCmdEndRendering( m_commandBuffer );
  6229. }
  // Dynamic state: sets the face culling mode for subsequent draws (vkCmdSetCullMode).
  6230. template <typename Dispatch>
  6231. VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6232. {
  6233. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6234. d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  6235. }
  // Dynamic state: sets which winding order is considered front-facing (vkCmdSetFrontFace).
  6236. template <typename Dispatch>
  6237. VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6238. {
  6239. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6240. d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  6241. }
  // Dynamic state: sets the primitive topology used by subsequent draws (vkCmdSetPrimitiveTopology).
  6242. template <typename Dispatch>
  6243. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
  6244. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6245. {
  6246. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6247. d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  6248. }
  // Raw-pointer overload: sets viewport count and the viewports themselves in one call
  // (vkCmdSetViewportWithCount).
  6249. template <typename Dispatch>
  6250. VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount,
  6251. const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
  6252. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6253. {
  6254. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6255. d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  6256. }
  6257. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the viewport count is derived from the ArrayProxy size.
  6258. template <typename Dispatch>
  6259. VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
  6260. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6261. {
  6262. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6263. d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  6264. }
  6265. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: sets scissor count and rectangles in one call (vkCmdSetScissorWithCount).
  6266. template <typename Dispatch>
  6267. VULKAN_HPP_INLINE void
  6268. CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6269. {
  6270. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6271. d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  6272. }
  6273. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the scissor count is derived from the ArrayProxy size.
  6274. template <typename Dispatch>
  6275. VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
  6276. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6277. {
  6278. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6279. d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  6280. }
  6281. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: binds `bindingCount` vertex buffers starting at `firstBinding`, with
  // per-binding offsets and optional sizes/strides arrays (vkCmdBindVertexBuffers2). Null pSizes /
  // pStrides are passed through unchanged to the C entry point.
  6282. template <typename Dispatch>
  6283. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
  6284. uint32_t bindingCount,
  6285. const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
  6286. const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
  6287. const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
  6288. const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
  6289. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6290. {
  6291. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6292. d.vkCmdBindVertexBuffers2( m_commandBuffer,
  6293. firstBinding,
  6294. bindingCount,
  6295. reinterpret_cast<const VkBuffer *>( pBuffers ),
  6296. reinterpret_cast<const VkDeviceSize *>( pOffsets ),
  6297. reinterpret_cast<const VkDeviceSize *>( pSizes ),
  6298. reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  6299. }
  6300. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: ArrayProxy variant of bindVertexBuffers2. offsets must match buffers
  // in length; sizes/strides may be empty (meaning "not supplied") or match buffers in length.
  // Mismatches assert in VULKAN_HPP_NO_EXCEPTIONS builds and throw LogicError otherwise.
  6301. template <typename Dispatch>
  6302. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding,
  6303. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
  6304. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
  6305. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
  6306. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
  6307. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  6308. {
  6309. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Validate array-length invariants before touching the driver.
  6310. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  6311. VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  6312. VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
  6313. VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
  6314. # else
  6315. if ( buffers.size() != offsets.size() )
  6316. {
  6317. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" );
  6318. }
  6319. if ( !sizes.empty() && buffers.size() != sizes.size() )
  6320. {
  6321. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" );
  6322. }
  6323. if ( !strides.empty() && buffers.size() != strides.size() )
  6324. {
  6325. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" );
  6326. }
  6327. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  // Empty proxies yield nullptr from .data(), which the C API interprets as "parameter absent".
  6328. d.vkCmdBindVertexBuffers2( m_commandBuffer,
  6329. firstBinding,
  6330. buffers.size(),
  6331. reinterpret_cast<const VkBuffer *>( buffers.data() ),
  6332. reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
  6333. reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
  6334. reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  6335. }
  6336. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Dynamic state: enables/disables the depth test (vkCmdSetDepthTestEnable).
  6337. template <typename Dispatch>
  6338. VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6339. {
  6340. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6341. d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  6342. }
  // Dynamic state: enables/disables depth writes (vkCmdSetDepthWriteEnable).
  6343. template <typename Dispatch>
  6344. VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6345. {
  6346. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6347. d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  6348. }
  // Dynamic state: sets the depth comparison operator (vkCmdSetDepthCompareOp).
  6349. template <typename Dispatch>
  6350. VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6351. {
  6352. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6353. d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  6354. }
  // Dynamic state: enables/disables the depth-bounds test (vkCmdSetDepthBoundsTestEnable).
  6355. template <typename Dispatch>
  6356. VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
  6357. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6358. {
  6359. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6360. d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  6361. }
  // Dynamic state: enables/disables the stencil test (vkCmdSetStencilTestEnable).
  6362. template <typename Dispatch>
  6363. VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6364. {
  6365. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6366. d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  6367. }
  // Dynamic state: sets the stencil fail/pass/depth-fail operations and the stencil compare
  // operator for the faces selected by faceMask (vkCmdSetStencilOp).
  6368. template <typename Dispatch>
  6369. VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
  6370. VULKAN_HPP_NAMESPACE::StencilOp failOp,
  6371. VULKAN_HPP_NAMESPACE::StencilOp passOp,
  6372. VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
  6373. VULKAN_HPP_NAMESPACE::CompareOp compareOp,
  6374. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6375. {
  6376. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6377. d.vkCmdSetStencilOp( m_commandBuffer,
  6378. static_cast<VkStencilFaceFlags>( faceMask ),
  6379. static_cast<VkStencilOp>( failOp ),
  6380. static_cast<VkStencilOp>( passOp ),
  6381. static_cast<VkStencilOp>( depthFailOp ),
  6382. static_cast<VkCompareOp>( compareOp ) );
  6383. }
  // Dynamic state: enables/disables primitive discard before rasterization
  // (vkCmdSetRasterizerDiscardEnable).
  6384. template <typename Dispatch>
  6385. VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
  6386. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6387. {
  6388. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6389. d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  6390. }
  // Dynamic state: enables/disables depth bias (vkCmdSetDepthBiasEnable).
  6391. template <typename Dispatch>
  6392. VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6393. {
  6394. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6395. d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  6396. }
  // Dynamic state: enables/disables primitive restart for indexed draws
  // (vkCmdSetPrimitiveRestartEnable).
  6397. template <typename Dispatch>
  6398. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
  6399. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6400. {
  6401. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6402. d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  6403. }
  // Raw-pointer overload: queries memory requirements for a buffer described by *pInfo without
  // creating the buffer, writing the result into *pMemoryRequirements
  // (vkGetDeviceBufferMemoryRequirements).
  6404. template <typename Dispatch>
  6405. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
  6406. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  6407. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6408. {
  6409. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6410. d.vkGetDeviceBufferMemoryRequirements(
  6411. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  6412. }
  6413. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the MemoryRequirements2 by value instead of via out-pointer.
  6414. template <typename Dispatch>
  6415. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  6416. Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6417. {
  6418. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6419. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  6420. d.vkGetDeviceBufferMemoryRequirements(
  6421. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  6422. return memoryRequirements;
  6423. }
  // StructureChain overload: fills the MemoryRequirements2 element of a caller-specified
  // pNext chain (X, Y, Z...) so extension structures chained to it are populated too.
  6424. template <typename X, typename Y, typename... Z, typename Dispatch>
  6425. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  6426. Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6427. {
  6428. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6429. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  6430. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  6431. d.vkGetDeviceBufferMemoryRequirements(
  6432. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  6433. return structureChain;
  6434. }
  6435. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: queries memory requirements for an image described by *pInfo without
  // creating the image (vkGetDeviceImageMemoryRequirements).
  6436. template <typename Dispatch>
  6437. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
  6438. VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
  6439. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6440. {
  6441. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6442. d.vkGetDeviceImageMemoryRequirements(
  6443. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  6444. }
  6445. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the MemoryRequirements2 by value instead of via out-pointer.
  6446. template <typename Dispatch>
  6447. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  6448. Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6449. {
  6450. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6451. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  6452. d.vkGetDeviceImageMemoryRequirements(
  6453. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  6454. return memoryRequirements;
  6455. }
  // StructureChain overload: fills the MemoryRequirements2 element of a caller-specified
  // pNext chain (X, Y, Z...) so extension structures chained to it are populated too.
  6456. template <typename X, typename Y, typename... Z, typename Dispatch>
  6457. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  6458. Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6459. {
  6460. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6461. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  6462. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  6463. d.vkGetDeviceImageMemoryRequirements(
  6464. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  6465. return structureChain;
  6466. }
  6467. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: standard two-call enumeration — with a null pSparseMemoryRequirements
  // the implementation writes the count; with a non-null array it fills up to *pSparseMemoryRequirementCount
  // entries (vkGetDeviceImageSparseMemoryRequirements).
  6468. template <typename Dispatch>
  6469. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
  6470. uint32_t * pSparseMemoryRequirementCount,
  6471. VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
  6472. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6473. {
  6474. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6475. d.vkGetDeviceImageSparseMemoryRequirements( m_device,
  6476. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
  6477. pSparseMemoryRequirementCount,
  6478. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  6479. }
  6480. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: performs the two-call enumeration internally and returns the results
  // as a std::vector. No VkResult is involved — this entry point cannot fail.
  6481. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  6482. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  6483. Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  6484. {
  6485. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6486. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  6487. uint32_t sparseMemoryRequirementCount;
  // First call: query the element count only.
  6488. d.vkGetDeviceImageSparseMemoryRequirements(
  6489. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  6490. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  // Second call: fill the now-sized vector.
  6491. d.vkGetDeviceImageSparseMemoryRequirements( m_device,
  6492. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  6493. &sparseMemoryRequirementCount,
  6494. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  6495. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  // Shrink in the (unexpected) case the second call reported fewer elements than the first.
  6496. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  6497. {
  6498. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  6499. }
  6500. return sparseMemoryRequirements;
  6501. }
  // Allocator-aware variant of the enhanced overload above: identical two-call enumeration, but
  // the returned vector is constructed with the caller-supplied allocator. The B1 enable_if
  // restricts this overload to allocators whose value_type matches SparseImageMemoryRequirements2.
  6502. template <typename SparseImageMemoryRequirements2Allocator,
  6503. typename Dispatch,
  6504. typename B1,
  6505. typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  6506. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  6507. Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
  6508. SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
  6509. Dispatch const & d ) const
  6510. {
  6511. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6512. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
  6513. sparseImageMemoryRequirements2Allocator );
  6514. uint32_t sparseMemoryRequirementCount;
  // First call: query the element count only.
  6515. d.vkGetDeviceImageSparseMemoryRequirements(
  6516. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  6517. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  // Second call: fill the now-sized vector.
  6518. d.vkGetDeviceImageSparseMemoryRequirements( m_device,
  6519. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  6520. &sparseMemoryRequirementCount,
  6521. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  6522. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  // Shrink in the (unexpected) case the second call reported fewer elements than the first.
  6523. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  6524. {
  6525. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  6526. }
  6527. return sparseMemoryRequirements;
  6528. }
  6529. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  6530. //=== VK_KHR_surface ===
  // Raw-pointer overload: destroys a surface with optional host allocation callbacks
  // (vkDestroySurfaceKHR); pAllocator may be null.
  6531. template <typename Dispatch>
  6532. VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6533. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6534. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6535. {
  6536. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6537. d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6538. }
  6539. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: Optional<AllocationCallbacks> converts to a (possibly null)
  // AllocationCallbacks pointer before the reinterpret_cast to the C type.
  6540. template <typename Dispatch>
  6541. VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6542. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6543. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6544. {
  6545. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6546. d.vkDestroySurfaceKHR( m_instance,
  6547. static_cast<VkSurfaceKHR>( surface ),
  6548. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6549. }
  6550. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy() overload for SurfaceKHR — identical body to destroySurfaceKHR (raw-pointer
  // allocator variant); exists so handle types can be destroyed through the uniform destroy API.
  6551. template <typename Dispatch>
  6552. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6553. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  6554. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6555. {
  6556. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6557. d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  6558. }
  6559. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for SurfaceKHR — identical body to destroySurfaceKHR
  // (Optional-allocator variant).
  6560. template <typename Dispatch>
  6561. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6562. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  6563. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6564. {
  6565. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6566. d.vkDestroySurfaceKHR( m_instance,
  6567. static_cast<VkSurfaceKHR>( surface ),
  6568. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  6569. }
  6570. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: queries whether queue family `queueFamilyIndex` can present to
  // `surface`, writing the answer to *pSupported and returning the raw Result
  // (vkGetPhysicalDeviceSurfaceSupportKHR).
  6572. template <typename Dispatch>
  6573. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex,
  6574. VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6575. VULKAN_HPP_NAMESPACE::Bool32 * pSupported,
  6576. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6577. {
  6578. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6579. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR(
  6580. m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) );
  6581. }
  6581. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the Bool32 directly; failure Results are routed through
  // resultCheck (exception or assert, per build settings).
  6582. template <typename Dispatch>
  6583. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type
  6584. PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6585. {
  6586. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6587. VULKAN_HPP_NAMESPACE::Bool32 supported;
  6588. VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR(
  6589. m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) );
  6590. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
  6591. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported );
  6592. }
  6593. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: queries surface capabilities (image counts, extents, transforms, ...)
  // into *pSurfaceCapabilities and returns the raw Result (vkGetPhysicalDeviceSurfaceCapabilitiesKHR).
  6594. template <typename Dispatch>
  6595. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  6596. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities,
  6597. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  6598. {
  6599. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6600. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  6601. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  6602. }
  6603. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns SurfaceCapabilitiesKHR by value; failure Results are routed
  // through resultCheck (exception or assert, per build settings).
  6604. template <typename Dispatch>
  6605. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type
  6606. PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6607. {
  6608. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6609. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
  6610. VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  6611. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) );
  6612. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
  6613. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  6614. }
  6615. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: two-call enumeration entry point. With pSurfaceFormats == nullptr only
// the count is written; otherwise up to *pSurfaceFormatCount formats are copied out.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                    uint32_t * pSurfaceFormatCount,
                                                                                    VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR(
    m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
}
  6626. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6627. template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  6628. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
  6629. PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6630. {
  6631. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6632. std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
  6633. uint32_t surfaceFormatCount;
  6634. VkResult result;
  6635. do
  6636. {
  6637. result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
  6638. if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
  6639. {
  6640. surfaceFormats.resize( surfaceFormatCount );
  6641. result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
  6642. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
  6643. }
  6644. } while ( result == VK_INCOMPLETE );
  6645. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  6646. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  6647. if ( surfaceFormatCount < surfaceFormats.size() )
  6648. {
  6649. surfaceFormats.resize( surfaceFormatCount );
  6650. }
  6651. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  6652. }
// Allocator-aware variant: identical enumeration logic, but the result vector is constructed
// with the caller-provided allocator. SFINAE (B1::value_type == SurfaceFormatKHR) keeps this
// overload from shadowing the allocator-less one.
template <typename SurfaceFormatKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type
  PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                        SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator,
                                        Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
  uint32_t surfaceFormatCount;
  VkResult result;
  // Two-call enumeration: count, resize, fetch; restart whenever the fetch reports VK_INCOMPLETE.
  do
  {
    result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      surfaceFormats.resize( surfaceFormatCount );
      result = d.vkGetPhysicalDeviceSurfaceFormatsKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Shrink if the final count is smaller than the buffer allocated for it.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: two-call enumeration entry point for the present modes of a surface;
// the caller owns the count/array storage and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                         uint32_t * pPresentModeCount,
                                                                                         VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
                                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR(
    m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
}
  6695. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6696. template <typename PresentModeKHRAllocator, typename Dispatch>
  6697. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
  6698. PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6699. {
  6700. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6701. std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
  6702. uint32_t presentModeCount;
  6703. VkResult result;
  6704. do
  6705. {
  6706. result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
  6707. if ( ( result == VK_SUCCESS ) && presentModeCount )
  6708. {
  6709. presentModes.resize( presentModeCount );
  6710. result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
  6711. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
  6712. }
  6713. } while ( result == VK_INCOMPLETE );
  6714. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  6715. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  6716. if ( presentModeCount < presentModes.size() )
  6717. {
  6718. presentModes.resize( presentModeCount );
  6719. }
  6720. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  6721. }
// Allocator-aware variant: identical enumeration logic, but the result vector is constructed
// with the caller-provided allocator. SFINAE (B1::value_type == PresentModeKHR) keeps this
// overload from shadowing the allocator-less one.
template <typename PresentModeKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
  PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                             PresentModeKHRAllocator & presentModeKHRAllocator,
                                             Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
  uint32_t presentModeCount;
  VkResult result;
  // Two-call enumeration: count, resize, fetch; restart whenever the fetch reports VK_INCOMPLETE.
  do
  {
    result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr );
    if ( ( result == VK_SUCCESS ) && presentModeCount )
    {
      presentModes.resize( presentModeCount );
      result = d.vkGetPhysicalDeviceSurfacePresentModesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" );
  VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  // Shrink if the final count is smaller than the buffer allocated for it.
  if ( presentModeCount < presentModes.size() )
  {
    presentModes.resize( presentModeCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_KHR_swapchain ===
// C-API style overload: forwards to vkCreateSwapchainKHR; pAllocator may be nullptr for the
// default allocator, and the new handle is written to *pSwapchain on success.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo,
                                                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                          VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateSwapchainKHR( m_device,
                                                      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ),
                                                      reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                      reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator, returns the
// new SwapchainKHR by value after resultCheck() has validated the call.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
  Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                              Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
  // Optional<> converts to a possibly-null AllocationCallbacks pointer for the C API.
  VkResult result =
    d.vkCreateSwapchainKHR( m_device,
                            reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: same call as createSwapchainKHR(), but the returned swapchain is
// wrapped in a UniqueHandle whose ObjectDestroy deleter reuses this device, the same allocator,
// and the same dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
  Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
  VkResult result =
    d.vkCreateSwapchainKHR( m_device,
                            reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                            reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: destroys the swapchain; pAllocator may be nullptr for the default
// allocator. No result to report.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same destruction, with the allocator passed as an Optional that converts
// to a possibly-null AllocationCallbacks pointer for the C API.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySwapchainKHR( m_device,
                           static_cast<VkSwapchainKHR>( swapchain ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy() overload for SwapchainKHR (used by smart-handle deleters); identical to
// destroySwapchainKHR() with a raw allocator pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy() overload for SwapchainKHR with an Optional allocator; identical to the
// enhanced destroySwapchainKHR().
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroySwapchainKHR( m_device,
                           static_cast<VkSwapchainKHR>( swapchain ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: two-call enumeration entry point for the images owned by a swapchain;
// the caller owns the count/array storage and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                             uint32_t * pSwapchainImageCount,
                                                                             VULKAN_HPP_NAMESPACE::Image * pSwapchainImages,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) );
}
  6855. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6856. template <typename ImageAllocator, typename Dispatch>
  6857. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
  6858. Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  6859. {
  6860. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6861. std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages;
  6862. uint32_t swapchainImageCount;
  6863. VkResult result;
  6864. do
  6865. {
  6866. result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
  6867. if ( ( result == VK_SUCCESS ) && swapchainImageCount )
  6868. {
  6869. swapchainImages.resize( swapchainImageCount );
  6870. result = d.vkGetSwapchainImagesKHR(
  6871. m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
  6872. }
  6873. } while ( result == VK_INCOMPLETE );
  6874. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  6875. VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
  6876. if ( swapchainImageCount < swapchainImages.size() )
  6877. {
  6878. swapchainImages.resize( swapchainImageCount );
  6879. }
  6880. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
  6881. }
// Allocator-aware variant: identical enumeration logic, but the result vector is constructed
// with the caller-provided allocator. SFINAE (B1::value_type == Image) keeps this overload from
// shadowing the allocator-less one.
template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type
  Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator );
  uint32_t swapchainImageCount;
  VkResult result;
  // Two-call enumeration: count, resize, fetch; restart whenever the fetch reports VK_INCOMPLETE.
  do
  {
    result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr );
    if ( ( result == VK_SUCCESS ) && swapchainImageCount )
    {
      swapchainImages.resize( swapchainImageCount );
      result = d.vkGetSwapchainImagesKHR(
        m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
  // Shrink if the final count is smaller than the buffer allocated for it.
  if ( swapchainImageCount < swapchainImages.size() )
  {
    swapchainImages.resize( swapchainImageCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: forwards to vkAcquireNextImageKHR; the acquired image index is written
// to *pImageIndex and the raw Result is returned unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                           uint64_t timeout,
                                                                           VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                                           VULKAN_HPP_NAMESPACE::Fence fence,
                                                                           uint32_t * pImageIndex,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkAcquireNextImageKHR(
    m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns ResultValue<uint32_t> rather than ResultValueType because several
// non-eSuccess codes are legitimate outcomes here — eTimeout, eNotReady and eSuboptimalKHR are
// listed as accepted success codes for resultCheck(), so callers must inspect the returned
// Result as well as the image index.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                          uint64_t timeout,
                                                                                          VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                                                          VULKAN_HPP_NAMESPACE::Fence fence,
                                                                                          Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  uint32_t imageIndex;
  VkResult result = d.vkAcquireNextImageKHR(
    m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                 VULKAN_HPP_NAMESPACE::Result::eTimeout,
                 VULKAN_HPP_NAMESPACE::Result::eNotReady,
                 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: forwards to vkQueuePresentKHR and returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: eSuboptimalKHR is listed as an accepted success code for resultCheck(),
// and the Result is returned directly so callers can react to it.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo,
                                                                                       Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: forwards to vkGetDeviceGroupPresentCapabilitiesKHR; the caller supplies
// the output struct and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR(
  VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
}
  6970. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6971. template <typename Dispatch>
  6972. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type
  6973. Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  6974. {
  6975. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6976. VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
  6977. VkResult result =
  6978. d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) );
  6979. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  6980. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities );
  6981. }
  6982. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: forwards to vkGetDeviceGroupSurfacePresentModesKHR; the caller supplies
// the output flags and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                      VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR(
    m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
}
  6992. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  6993. template <typename Dispatch>
  6994. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
  6995. Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  6996. {
  6997. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  6998. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  6999. VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR(
  7000. m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
  7001. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
  7002. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  7003. }
  7004. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: two-call enumeration entry point for per-surface present rectangles;
// the caller owns the count/array storage and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
                                                                                       uint32_t * pRectCount,
                                                                                       VULKAN_HPP_NAMESPACE::Rect2D * pRects,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) );
}
  7015. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7016. template <typename Rect2DAllocator, typename Dispatch>
  7017. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
  7018. PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  7019. {
  7020. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7021. std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects;
  7022. uint32_t rectCount;
  7023. VkResult result;
  7024. do
  7025. {
  7026. result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
  7027. if ( ( result == VK_SUCCESS ) && rectCount )
  7028. {
  7029. rects.resize( rectCount );
  7030. result = d.vkGetPhysicalDevicePresentRectanglesKHR(
  7031. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
  7032. }
  7033. } while ( result == VK_INCOMPLETE );
  7034. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  7035. VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  7036. if ( rectCount < rects.size() )
  7037. {
  7038. rects.resize( rectCount );
  7039. }
  7040. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
  7041. }
// Allocator-aware variant: identical enumeration logic, but the result vector is constructed
// with the caller-provided allocator. SFINAE (B1::value_type == Rect2D) keeps this overload
// from shadowing the allocator-less one.
template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type
  PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator );
  uint32_t rectCount;
  VkResult result;
  // Two-call enumeration: count, resize, fetch; restart whenever the fetch reports VK_INCOMPLETE.
  do
  {
    result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr );
    if ( ( result == VK_SUCCESS ) && rectCount )
    {
      rects.resize( rectCount );
      result = d.vkGetPhysicalDevicePresentRectanglesKHR(
        m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" );
  VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  // Shrink if the final count is smaller than the buffer allocated for it.
  if ( rectCount < rects.size() )
  {
    rects.resize( rectCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: forwards to vkAcquireNextImage2KHR; the acquired image index is written
// to *pImageIndex and the raw Result is returned unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo,
                                                                            uint32_t * pImageIndex,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: like acquireNextImageKHR, returns ResultValue<uint32_t> because eTimeout,
// eNotReady and eSuboptimalKHR are listed as accepted success codes for resultCheck(); callers
// must inspect the returned Result as well as the image index.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo,
                                                                                           Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  uint32_t imageIndex;
  VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess,
                 VULKAN_HPP_NAMESPACE::Result::eTimeout,
                 VULKAN_HPP_NAMESPACE::Result::eNotReady,
                 VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_KHR_display ===
// C-API style overload: two-call enumeration entry point for the display properties of this
// physical device; the caller owns the count/array storage and receives the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount,
                                                                                       VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) );
}
  7104. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7105. template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
  7106. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
  7107. PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
  7108. {
  7109. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7110. std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
  7111. uint32_t propertyCount;
  7112. VkResult result;
  7113. do
  7114. {
  7115. result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
  7116. if ( ( result == VK_SUCCESS ) && propertyCount )
  7117. {
  7118. properties.resize( propertyCount );
  7119. result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
  7120. }
  7121. } while ( result == VK_INCOMPLETE );
  7122. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  7123. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  7124. if ( propertyCount < properties.size() )
  7125. {
  7126. properties.resize( propertyCount );
  7127. }
  7128. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  7129. }
// Allocator-aware variant: identical enumeration logic, but the result vector is constructed
// with the caller-provided allocator. SFINAE (B1::value_type == DisplayPropertiesKHR) keeps
// this overload from shadowing the allocator-less one.
template <typename DisplayPropertiesKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type
  PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  // Two-call enumeration: count, resize, fetch; restart whenever the fetch reports VK_INCOMPLETE.
  do
  {
    result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Shrink if the final count is smaller than the buffer allocated for it.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7159. template <typename Dispatch>
  7160. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount,
  7161. VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties,
  7162. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7163. {
  7164. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7165. return static_cast<Result>(
  7166. d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
  7167. }
  7168. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: returns all VkDisplayPlanePropertiesKHR for this
  // physical device in a std::vector (default-constructed allocator).
  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    // Two-call idiom: count, resize, fetch; loop while VK_INCOMPLETE is reported.
    do
    {
      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink in case the final call returned fewer elements than were allocated.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // Allocator overload: same enumeration, result vector built with the caller's
  // allocator; B1 SFINAE-gates on a matching allocator value_type.
  template <typename DisplayPlanePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
    uint32_t propertyCount;
    VkResult result;
    // Two-call idiom; retry while the implementation reports VK_INCOMPLETE.
    do
    {
      result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
          m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  7226. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7227. template <typename Dispatch>
  7228. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex,
  7229. uint32_t * pDisplayCount,
  7230. VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays,
  7231. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7232. {
  7233. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7234. return static_cast<Result>(
  7235. d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) );
  7236. }
  7237. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: returns all DisplayKHR handles that can be used with
  // the given plane index, in a std::vector (default-constructed allocator).
  template <typename DisplayKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays;
    uint32_t displayCount;
    VkResult result;
    // Two-call idiom: count, resize, fetch; loop while VK_INCOMPLETE is reported.
    do
    {
      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
      if ( ( result == VK_SUCCESS ) && displayCount )
      {
        displays.resize( displayCount );
        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
    // Shrink in case the final call returned fewer elements than were allocated.
    if ( displayCount < displays.size() )
    {
      displays.resize( displayCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
  }
  // Allocator overload: same enumeration, result vector built with the caller's
  // allocator; B1 SFINAE-gates on a matching allocator value_type.
  template <typename DisplayKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type
    PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
    uint32_t displayCount;
    VkResult result;
    // Two-call idiom; retry while the implementation reports VK_INCOMPLETE.
    do
    {
      result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr );
      if ( ( result == VK_SUCCESS ) && displayCount )
      {
        displays.resize( displayCount );
        result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
    VULKAN_HPP_ASSERT( displayCount <= displays.size() );
    if ( displayCount < displays.size() )
    {
      displays.resize( displayCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays );
  }
  7291. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7292. template <typename Dispatch>
  7293. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7294. uint32_t * pPropertyCount,
  7295. VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties,
  7296. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7297. {
  7298. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7299. return static_cast<Result>( d.vkGetDisplayModePropertiesKHR(
  7300. m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) );
  7301. }
  7302. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: returns all VkDisplayModePropertiesKHR of the given
  // display in a std::vector (default-constructed allocator).
  template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
    uint32_t propertyCount;
    VkResult result;
    // Two-call idiom: count, resize, fetch; loop while VK_INCOMPLETE is reported.
    do
    {
      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetDisplayModePropertiesKHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    // Shrink in case the final call returned fewer elements than were allocated.
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // Allocator overload: same enumeration, result vector built with the caller's
  // allocator; B1 SFINAE-gates on a matching allocator value_type.
  template <typename DisplayModePropertiesKHRAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type
    PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                 DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
    uint32_t propertyCount;
    VkResult result;
    // Two-call idiom; retry while the implementation reports VK_INCOMPLETE.
    do
    {
      result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
      if ( ( result == VK_SUCCESS ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = d.vkGetDisplayModePropertiesKHR(
          m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" );
    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
    if ( propertyCount < properties.size() )
    {
      properties.resize( propertyCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  7362. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7363. template <typename Dispatch>
  7364. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7365. const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo,
  7366. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7367. VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode,
  7368. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7369. {
  7370. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7371. return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice,
  7372. static_cast<VkDisplayKHR>( display ),
  7373. reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ),
  7374. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7375. reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) );
  7376. }
  7377. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7378. template <typename Dispatch>
  7379. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type
  7380. PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  7381. const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
  7382. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  7383. Dispatch const & d ) const
  7384. {
  7385. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7386. VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  7387. VkResult result =
  7388. d.vkCreateDisplayModeKHR( m_physicalDevice,
  7389. static_cast<VkDisplayKHR>( display ),
  7390. reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
  7391. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7392. reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
  7393. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
  7394. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode );
  7395. }
  7396. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant of createDisplayModeKHR: same creation call, but the new
  // DisplayModeKHR is wrapped in a UniqueHandle whose deleter carries this
  // PhysicalDevice, the allocation callbacks and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type
    PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
    VkResult result =
      d.vkCreateDisplayModeKHR( m_physicalDevice,
                                static_cast<VkDisplayKHR>( display ),
                                reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkDisplayModeKHR *>( &mode ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) );
  }
  7417. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7418. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7419. template <typename Dispatch>
  7420. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  7421. PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode,
  7422. uint32_t planeIndex,
  7423. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities,
  7424. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7425. {
  7426. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7427. return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR(
  7428. m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
  7429. }
  7430. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7431. template <typename Dispatch>
  7432. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type
  7433. PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
  7434. {
  7435. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7436. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
  7437. VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR(
  7438. m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) );
  7439. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
  7440. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  7441. }
  7442. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7443. template <typename Dispatch>
  7444. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo,
  7445. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7446. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  7447. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7448. {
  7449. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7450. return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance,
  7451. reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ),
  7452. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7453. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  7454. }
  7455. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7456. template <typename Dispatch>
  7457. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  7458. Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
  7459. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  7460. Dispatch const & d ) const
  7461. {
  7462. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7463. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  7464. VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
  7465. m_instance,
  7466. reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
  7467. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7468. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  7469. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
  7470. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  7471. }
  7472. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same creation call, but the new SurfaceKHR is wrapped in a
  // UniqueHandle whose deleter carries this Instance, the allocation callbacks and
  // the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateDisplayPlaneSurfaceKHR(
      m_instance,
      reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
  7491. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7492. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7493. //=== VK_KHR_display_swapchain ===
  7494. template <typename Dispatch>
  7495. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount,
  7496. const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos,
  7497. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  7498. VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains,
  7499. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7500. {
  7501. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7502. return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device,
  7503. swapchainCount,
  7504. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ),
  7505. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  7506. reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) );
  7507. }
  7508. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode batch creation: creates one swapchain per element of createInfos
  // and returns the handles in a std::vector sized to match.
  template <typename SwapchainKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // One output handle per create-info; the loader fills the whole array in one call.
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
  }
  // Allocator overload of the batch creation: the result vector is constructed with
  // the caller-supplied allocator; B0 SFINAE-gates on a matching allocator value_type.
  template <typename SwapchainKHRAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       SwapchainKHRAllocator & swapchainKHRAllocator,
                                       Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains );
  }
  7547. template <typename Dispatch>
  7548. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type
  7549. Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
  7550. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  7551. Dispatch const & d ) const
  7552. {
  7553. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7554. VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
  7555. VkResult result = d.vkCreateSharedSwapchainsKHR(
  7556. m_device,
  7557. 1,
  7558. reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
  7559. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  7560. reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
  7561. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
  7562. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain );
  7563. }
  7564. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle batch creation: creates the raw handles in one call, then wraps each
  // in a UniqueHandle sharing a single ObjectDestroy deleter (device + callbacks + dispatcher).
  template <typename Dispatch, typename SwapchainKHRAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Temporary raw-handle buffer; ownership is transferred to uniqueSwapchains below.
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
    uniqueSwapchains.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
  }
  // Allocator overload of the smart-handle batch creation: the UniqueHandle vector is
  // constructed with the caller-supplied allocator; B0 SFINAE-gates on a matching
  // allocator value_type. The temporary raw-handle buffer uses the default allocator.
  template <typename Dispatch,
            typename SwapchainKHRAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type
    Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos,
                                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                             SwapchainKHRAllocator & swapchainKHRAllocator,
                                             Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() );
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      createInfos.size(),
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
    uniqueSwapchains.reserve( createInfos.size() );
    // A single deleter instance is shared by every UniqueHandle produced below.
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & swapchain : swapchains )
    {
      uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) );
  }
  // Smart-handle single-swapchain convenience: calls the batch entry point with a
  // count of one and wraps the resulting handle in a UniqueHandle whose deleter
  // carries this Device, the allocation callbacks and the dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type
    Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
    VkResult result = d.vkCreateSharedSwapchainsKHR(
      m_device,
      1,
      reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSwapchainKHR *>( &swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  7638. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7639. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7640. #if defined( VK_USE_PLATFORM_XLIB_KHR )
  7641. //=== VK_KHR_xlib_surface ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateXlibSurfaceKHR call and returns its VkResult cast to Result (no throwing check).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance,
                                                        reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                        reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                        reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
  7654. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the created SurfaceKHR through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateXlibSurfaceKHR( m_instance,
                              reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  7671. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createXlibSurfaceKHR, but wraps the new surface
// in a UniqueHandle whose ObjectDestroy deleter holds this instance, the allocator
// and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateXlibSurfaceKHR( m_instance,
                              reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ),
                              reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                              reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  7690. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7691. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: queries via the dispatcher whether the given queue family of
// this physical device can present to the X11 display/visual; returns VkBool32 as Bool32.
template <typename Dispatch>
VULKAN_HPP_INLINE Bool32
  PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
}
  7699. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7700. template <typename Dispatch>
  7701. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  7702. PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7703. {
  7704. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7705. VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
  7706. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  7707. }
  7708. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7709. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  7710. #if defined( VK_USE_PLATFORM_XCB_KHR )
  7711. //=== VK_KHR_xcb_surface ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateXcbSurfaceKHR call and returns its VkResult cast to Result (no throwing check).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance,
                                                       reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
  7724. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the created SurfaceKHR through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateXcbSurfaceKHR( m_instance,
                             reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  7741. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createXcbSurfaceKHR, but wraps the new surface
// in a UniqueHandle whose ObjectDestroy deleter holds this instance, the allocator
// and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateXcbSurfaceKHR( m_instance,
                             reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  7760. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7761. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: queries via the dispatcher whether the given queue family of
// this physical device can present to the XCB connection/visual; returns VkBool32 as Bool32.
template <typename Dispatch>
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
                                                                       xcb_connection_t * connection,
                                                                       xcb_visualid_t visual_id,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
}
  7771. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7772. template <typename Dispatch>
  7773. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex,
  7774. xcb_connection_t & connection,
  7775. xcb_visualid_t visual_id,
  7776. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7777. {
  7778. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7779. VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
  7780. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  7781. }
  7782. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7783. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  7784. #if defined( VK_USE_PLATFORM_WAYLAND_KHR )
  7785. //=== VK_KHR_wayland_surface ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateWaylandSurfaceKHR call and returns its VkResult cast to Result (no throwing check).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance,
                                                           reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
  7798. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the created SurfaceKHR through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result = d.vkCreateWaylandSurfaceKHR(
    m_instance,
    reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  7815. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createWaylandSurfaceKHR, but wraps the new
// surface in a UniqueHandle whose ObjectDestroy deleter holds this instance, the
// allocator and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result = d.vkCreateWaylandSurfaceKHR(
    m_instance,
    reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  7834. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7835. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: queries via the dispatcher whether the given queue family of
// this physical device can present to the Wayland display; returns VkBool32 as Bool32.
template <typename Dispatch>
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex,
                                                                           struct wl_display * display,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
}
  7844. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  7845. template <typename Dispatch>
  7846. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  7847. PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  7848. {
  7849. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  7850. VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
  7851. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  7852. }
  7853. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7854. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  7855. #if defined( VK_USE_PLATFORM_ANDROID_KHR )
  7856. //=== VK_KHR_android_surface ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateAndroidSurfaceKHR call and returns its VkResult cast to Result (no throwing check).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo,
                                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                 VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance,
                                                           reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ),
                                                           reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                           reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
  7869. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the created SurfaceKHR through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result = d.vkCreateAndroidSurfaceKHR(
    m_instance,
    reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  7886. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createAndroidSurfaceKHR, but wraps the new
// surface in a UniqueHandle whose ObjectDestroy deleter holds this instance, the
// allocator and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result = d.vkCreateAndroidSurfaceKHR(
    m_instance,
    reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  7905. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7906. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  7907. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  7908. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  7909. //=== VK_KHR_win32_surface ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateWin32SurfaceKHR call and returns its VkResult cast to Result (no throwing check).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo,
                                                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                               VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance,
                                                         reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ),
                                                         reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                         reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
}
  7922. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes the create-info by reference and an Optional allocator,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the created SurfaceKHR through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateWin32SurfaceKHR( m_instance,
                               reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  7939. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createWin32SurfaceKHR, but wraps the new surface
// in a UniqueHandle whose ObjectDestroy deleter holds this instance, the allocator
// and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo,
                                         Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result =
    d.vkCreateWin32SurfaceKHR( m_instance,
                               reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ),
                               reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                               reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  7958. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  7959. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Queries via the dispatcher whether the given queue family of this physical device
// supports presentation on Win32 (no surface parameter needed on this platform).
template <typename Dispatch>
VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
}
  7966. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  7967. //=== VK_EXT_debug_report ===
// C-API style overload: forwards the raw pointers unchanged to the dispatched
// vkCreateDebugReportCallbackEXT call and returns its VkResult cast to Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance,
                                                                reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ),
                                                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) );
}
  7981. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: registers a debug-report callback from a reference create-info,
// checks the VkResult via resultCheck (throws when exceptions are enabled), and
// returns the new DebugReportCallbackEXT handle through ResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type
  Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
  VkResult result = d.vkCreateDebugReportCallbackEXT(
    m_instance,
    reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback );
}
  7998. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Smart-handle overload: same call as createDebugReportCallbackEXT, but wraps the new
// callback in a UniqueHandle whose ObjectDestroy deleter holds this instance, the
// allocator and the dispatcher for later destruction.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type
  Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
  VkResult result = d.vkCreateDebugReportCallbackEXT(
    m_instance,
    reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  8017. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  8018. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: destroys the debug-report callback, forwarding the raw
// allocator pointer unchanged to the dispatched vkDestroyDebugReportCallbackEXT call.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDebugReportCallbackEXT(
    m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  8028. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: destroys the debug-report callback; the Optional allocator is
// unwrapped to a raw VkAllocationCallbacks pointer (null when not provided).
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDebugReportCallbackEXT(
    m_instance,
    static_cast<VkDebugReportCallbackEXT>( callback ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  8040. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy() overload for DebugReportCallbackEXT handles; identical behavior to
// destroyDebugReportCallbackEXT with a raw allocator pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDebugReportCallbackEXT(
    m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
  8050. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Generic destroy() overload for DebugReportCallbackEXT handles with an Optional
// allocator; the Optional is unwrapped to a raw pointer (null when not provided).
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDebugReportCallbackEXT(
    m_instance,
    static_cast<VkDebugReportCallbackEXT>( callback ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
  8062. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: injects a message into the debug-report stream, converting the
// flag/object-type wrappers to their Vk equivalents and passing the C strings through.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
                                                        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                        uint64_t object,
                                                        size_t location,
                                                        int32_t messageCode,
                                                        const char * pLayerPrefix,
                                                        const char * pMessage,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDebugReportMessageEXT( m_instance,
                             static_cast<VkDebugReportFlagsEXT>( flags ),
                             static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                             object,
                             location,
                             messageCode,
                             pLayerPrefix,
                             pMessage );
}
  8083. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same as the pointer version but accepts std::string for the layer
// prefix and message, passing their c_str() pointers to the dispatched call.
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags,
                                                        VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_,
                                                        uint64_t object,
                                                        size_t location,
                                                        int32_t messageCode,
                                                        const std::string & layerPrefix,
                                                        const std::string & message,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDebugReportMessageEXT( m_instance,
                             static_cast<VkDebugReportFlagsEXT>( flags ),
                             static_cast<VkDebugReportObjectTypeEXT>( objectType_ ),
                             object,
                             location,
                             messageCode,
                             layerPrefix.c_str(),
                             message.c_str() );
}
  8104. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8105. //=== VK_EXT_debug_marker ===
// C-API style overload: attaches a debug-marker tag to an object, forwarding the raw
// tag-info pointer to vkDebugMarkerSetObjectTagEXT and returning its VkResult as Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
}
  8113. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8114. template <typename Dispatch>
  8115. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  8116. Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  8117. {
  8118. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8119. VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) );
  8120. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
  8121. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  8122. }
  8123. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// C-API style overload: gives an object a debug-marker name, forwarding the raw
// name-info pointer to vkDebugMarkerSetObjectNameEXT and returning its VkResult as Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo,
                                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
}
  8131. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8132. template <typename Dispatch>
  8133. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  8134. Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  8135. {
  8136. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8137. VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) );
  8138. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
  8139. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  8140. }
  8141. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8142. template <typename Dispatch>
  8143. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
  8144. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8145. {
  8146. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8147. d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  8148. }
  8149. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8150. template <typename Dispatch>
  8151. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
  8152. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8153. {
  8154. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8155. d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  8156. }
  8157. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8158. template <typename Dispatch>
  8159. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8160. {
  8161. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8162. d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
  8163. }
  8164. template <typename Dispatch>
  8165. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo,
  8166. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8167. {
  8168. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8169. d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  8170. }
  8171. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8172. template <typename Dispatch>
  8173. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo,
  8174. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8175. {
  8176. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8177. d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  8178. }
  8179. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8180. //=== VK_KHR_video_queue ===
  8181. template <typename Dispatch>
  8182. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile,
  8183. VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities,
  8184. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8185. {
  8186. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8187. return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
  8188. m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) );
  8189. }
  8190. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8191. template <typename Dispatch>
  8192. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type
  8193. PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  8194. {
  8195. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8196. VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities;
  8197. VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
  8198. m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
  8199. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  8200. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  8201. }
  8202. template <typename X, typename Y, typename... Z, typename Dispatch>
  8203. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
  8204. PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const
  8205. {
  8206. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8207. StructureChain<X, Y, Z...> structureChain;
  8208. VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>();
  8209. VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR(
  8210. m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) );
  8211. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" );
  8212. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  8213. }
  8214. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8215. template <typename Dispatch>
  8216. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  8217. PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo,
  8218. uint32_t * pVideoFormatPropertyCount,
  8219. VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties,
  8220. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8221. {
  8222. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8223. return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
  8224. reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ),
  8225. pVideoFormatPropertyCount,
  8226. reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) );
  8227. }
  8228. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8229. template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch>
  8230. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  8231. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
  8232. PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const
  8233. {
  8234. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8235. std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties;
  8236. uint32_t videoFormatPropertyCount;
  8237. VkResult result;
  8238. do
  8239. {
  8240. result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
  8241. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
  8242. if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
  8243. {
  8244. videoFormatProperties.resize( videoFormatPropertyCount );
  8245. result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
  8246. reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
  8247. &videoFormatPropertyCount,
  8248. reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
  8249. }
  8250. } while ( result == VK_INCOMPLETE );
  8251. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
  8252. VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
  8253. if ( videoFormatPropertyCount < videoFormatProperties.size() )
  8254. {
  8255. videoFormatProperties.resize( videoFormatPropertyCount );
  8256. }
  8257. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
  8258. }
  8259. template <typename VideoFormatPropertiesKHRAllocator,
  8260. typename Dispatch,
  8261. typename B1,
  8262. typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type>
  8263. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  8264. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type
  8265. PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo,
  8266. VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator,
  8267. Dispatch const & d ) const
  8268. {
  8269. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8270. std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator );
  8271. uint32_t videoFormatPropertyCount;
  8272. VkResult result;
  8273. do
  8274. {
  8275. result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR(
  8276. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr );
  8277. if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount )
  8278. {
  8279. videoFormatProperties.resize( videoFormatPropertyCount );
  8280. result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice,
  8281. reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ),
  8282. &videoFormatPropertyCount,
  8283. reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) );
  8284. }
  8285. } while ( result == VK_INCOMPLETE );
  8286. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" );
  8287. VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() );
  8288. if ( videoFormatPropertyCount < videoFormatProperties.size() )
  8289. {
  8290. videoFormatProperties.resize( videoFormatPropertyCount );
  8291. }
  8292. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties );
  8293. }
  8294. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8295. template <typename Dispatch>
  8296. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo,
  8297. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8298. VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession,
  8299. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8300. {
  8301. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8302. return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device,
  8303. reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ),
  8304. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  8305. reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) );
  8306. }
  8307. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8308. template <typename Dispatch>
  8309. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type
  8310. Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
  8311. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8312. Dispatch const & d ) const
  8313. {
  8314. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8315. VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
  8316. VkResult result =
  8317. d.vkCreateVideoSessionKHR( m_device,
  8318. reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
  8319. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  8320. reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
  8321. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" );
  8322. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession );
  8323. }
  8324. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  8325. template <typename Dispatch>
  8326. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type
  8327. Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo,
  8328. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8329. Dispatch const & d ) const
  8330. {
  8331. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8332. VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession;
  8333. VkResult result =
  8334. d.vkCreateVideoSessionKHR( m_device,
  8335. reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ),
  8336. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  8337. reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) );
  8338. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" );
  8339. return createResultValueType(
  8340. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  8341. UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  8342. }
  8343. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  8344. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8345. template <typename Dispatch>
  8346. VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8347. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8348. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8349. {
  8350. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8351. d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  8352. }
  8353. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8354. template <typename Dispatch>
  8355. VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8356. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8357. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8358. {
  8359. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8360. d.vkDestroyVideoSessionKHR(
  8361. m_device,
  8362. static_cast<VkVideoSessionKHR>( videoSession ),
  8363. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  8364. }
  8365. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8366. template <typename Dispatch>
  8367. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8368. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8369. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8370. {
  8371. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8372. d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  8373. }
  8374. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8375. template <typename Dispatch>
  8376. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8377. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8378. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8379. {
  8380. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8381. d.vkDestroyVideoSessionKHR(
  8382. m_device,
  8383. static_cast<VkVideoSessionKHR>( videoSession ),
  8384. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  8385. }
  8386. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8387. template <typename Dispatch>
  8388. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  8389. Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8390. uint32_t * pMemoryRequirementsCount,
  8391. VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements,
  8392. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8393. {
  8394. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8395. return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
  8396. static_cast<VkVideoSessionKHR>( videoSession ),
  8397. pMemoryRequirementsCount,
  8398. reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) );
  8399. }
  8400. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8401. template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch>
  8402. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  8403. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
  8404. Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const
  8405. {
  8406. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8407. std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements;
  8408. uint32_t memoryRequirementsCount;
  8409. VkResult result;
  8410. do
  8411. {
  8412. result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
  8413. if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
  8414. {
  8415. memoryRequirements.resize( memoryRequirementsCount );
  8416. result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
  8417. static_cast<VkVideoSessionKHR>( videoSession ),
  8418. &memoryRequirementsCount,
  8419. reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
  8420. }
  8421. } while ( result == VK_INCOMPLETE );
  8422. VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
  8423. if ( memoryRequirementsCount < memoryRequirements.size() )
  8424. {
  8425. memoryRequirements.resize( memoryRequirementsCount );
  8426. }
  8427. return memoryRequirements;
  8428. }
  8429. template <typename VideoSessionMemoryRequirementsKHRAllocator,
  8430. typename Dispatch,
  8431. typename B1,
  8432. typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type>
  8433. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  8434. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type
  8435. Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8436. VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator,
  8437. Dispatch const & d ) const
  8438. {
  8439. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8440. std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements(
  8441. videoSessionMemoryRequirementsKHRAllocator );
  8442. uint32_t memoryRequirementsCount;
  8443. VkResult result;
  8444. do
  8445. {
  8446. result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr );
  8447. if ( ( result == VK_SUCCESS ) && memoryRequirementsCount )
  8448. {
  8449. memoryRequirements.resize( memoryRequirementsCount );
  8450. result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device,
  8451. static_cast<VkVideoSessionKHR>( videoSession ),
  8452. &memoryRequirementsCount,
  8453. reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) );
  8454. }
  8455. } while ( result == VK_INCOMPLETE );
  8456. VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
  8457. if ( memoryRequirementsCount < memoryRequirements.size() )
  8458. {
  8459. memoryRequirements.resize( memoryRequirementsCount );
  8460. }
  8461. return memoryRequirements;
  8462. }
  8463. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8464. template <typename Dispatch>
  8465. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  8466. Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8467. uint32_t bindSessionMemoryInfoCount,
  8468. const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos,
  8469. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8470. {
  8471. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8472. return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device,
  8473. static_cast<VkVideoSessionKHR>( videoSession ),
  8474. bindSessionMemoryInfoCount,
  8475. reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) );
  8476. }
  8477. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8478. template <typename Dispatch>
  8479. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR(
  8480. VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession,
  8481. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos,
  8482. Dispatch const & d ) const
  8483. {
  8484. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8485. VkResult result = d.vkBindVideoSessionMemoryKHR( m_device,
  8486. static_cast<VkVideoSessionKHR>( videoSession ),
  8487. bindSessionMemoryInfos.size(),
  8488. reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) );
  8489. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" );
  8490. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  8491. }
  8492. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8493. template <typename Dispatch>
  8494. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  8495. Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo,
  8496. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8497. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters,
  8498. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8499. {
  8500. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8501. return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device,
  8502. reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ),
  8503. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  8504. reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) );
  8505. }
  8506. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8507. template <typename Dispatch>
  8508. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type
  8509. Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
  8510. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8511. Dispatch const & d ) const
  8512. {
  8513. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8514. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
  8515. VkResult result = d.vkCreateVideoSessionParametersKHR(
  8516. m_device,
  8517. reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
  8518. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  8519. reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
  8520. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" );
  8521. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters );
  8522. }
  8523. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  8524. template <typename Dispatch>
  8525. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type
  8526. Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo,
  8527. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8528. Dispatch const & d ) const
  8529. {
  8530. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8531. VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters;
  8532. VkResult result = d.vkCreateVideoSessionParametersKHR(
  8533. m_device,
  8534. reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ),
  8535. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  8536. reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) );
  8537. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" );
  8538. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  8539. UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>(
  8540. videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  8541. }
  8542. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  8543. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8544. template <typename Dispatch>
  8545. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  8546. Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8547. const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo,
  8548. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8549. {
  8550. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8551. return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device,
  8552. static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
  8553. reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) );
  8554. }
  8555. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8556. template <typename Dispatch>
  8557. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  8558. Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8559. const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo,
  8560. Dispatch const & d ) const
  8561. {
  8562. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8563. VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device,
  8564. static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
  8565. reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) );
  8566. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" );
  8567. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  8568. }
  8569. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8570. template <typename Dispatch>
  8571. VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8572. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8573. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8574. {
  8575. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8576. d.vkDestroyVideoSessionParametersKHR(
  8577. m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  8578. }
  8579. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8580. template <typename Dispatch>
  8581. VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8582. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8583. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8584. {
  8585. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8586. d.vkDestroyVideoSessionParametersKHR(
  8587. m_device,
  8588. static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
  8589. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  8590. }
  8591. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  8592. template <typename Dispatch>
  8593. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8594. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  8595. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8596. {
  8597. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8598. d.vkDestroyVideoSessionParametersKHR(
  8599. m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  8600. }
  8601. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  8602. template <typename Dispatch>
  8603. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters,
  8604. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  8605. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  8606. {
  8607. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  8608. d.vkDestroyVideoSessionParametersKHR(
  8609. m_device,
  8610. static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ),
  8611. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  8612. }
  8613. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// --- VK_KHR_video_queue command-buffer wrappers (continued) ---
// Each wrapper asserts that the dispatcher was loaded against this header's
// VK_HEADER_VERSION, then forwards to the C entry point, reinterpreting the
// C++ wrapper struct as its layout-identical C counterpart.

// Begin a video coding scope on this command buffer (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: same call, but takes the begin info by reference.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// End the current video coding scope (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of endVideoCodingKHR (reference variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Issue a video coding control operation (e.g. reset) (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of controlVideoCodingKHR (reference variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

//=== VK_KHR_video_decode_queue ===

// Record a video decode operation (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of decodeVideoKHR (reference variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo,
                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_transform_feedback ===

// Bind transform feedback buffers (raw pointer/count variant); forwards to
// vkCmdBindTransformFeedbackBuffersEXT after the header-version sanity check.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                                       uint32_t bindingCount,
                                                                       const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                                       const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                                       const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                          firstBinding,
                                          bindingCount,
                                          reinterpret_cast<const VkBuffer *>( pBuffers ),
                                          reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                          reinterpret_cast<const VkDeviceSize *>( pSizes ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy variant: checks that buffers/offsets agree in length and that
// sizes, if supplied, matches too — assert when exceptions are disabled,
// LogicError otherwise. An empty sizes proxy passes nullptr to the C API.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding,
                                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                  VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
# else
  if ( buffers.size() != offsets.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
  }
  if ( !sizes.empty() && buffers.size() != sizes.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
  }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer,
                                          firstBinding,
                                          buffers.size(),
                                          reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                          reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                          reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Begin transform feedback, optionally resuming from counter buffers
// (raw pointer/count variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                                 uint32_t counterBufferCount,
                                                                 const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                                                 const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBufferCount,
                                    reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                    reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy variant: offsets may be empty (treated as all-zero by the API);
// if non-empty, its length must match counterBuffers.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
                                            VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
# else
  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer,
                                    firstCounterBuffer,
                                    counterBuffers.size(),
                                    reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                    reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// End transform feedback, optionally writing counters back
// (raw pointer/count variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                                               uint32_t counterBufferCount,
                                                               const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers,
                                                               const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                  firstCounterBuffer,
                                  counterBufferCount,
                                  reinterpret_cast<const VkBuffer *>( pCounterBuffers ),
                                  reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy variant of endTransformFeedbackEXT; same size rules as the
// begin variant above.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers,
                                          VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
  VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
# else
  if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  d.vkCmdEndTransformFeedbackEXT( m_commandBuffer,
                                  firstCounterBuffer,
                                  counterBuffers.size(),
                                  reinterpret_cast<const VkBuffer *>( counterBuffers.data() ),
                                  reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Begin an indexed query (transform-feedback stream index variant of beginQuery).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                            uint32_t query,
                                                            VULKAN_HPP_NAMESPACE::QueryControlFlags flags,
                                                            uint32_t index,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
}

// End an indexed query started with beginQueryIndexedEXT.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
}

// Draw with vertex count sourced from a transform-feedback counter buffer.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount,
                                                                uint32_t firstInstance,
                                                                VULKAN_HPP_NAMESPACE::Buffer counterBuffer,
                                                                VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset,
                                                                uint32_t counterOffset,
                                                                uint32_t vertexStride,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer,
                                   instanceCount,
                                   firstInstance,
                                   static_cast<VkBuffer>( counterBuffer ),
                                   static_cast<VkDeviceSize>( counterBufferOffset ),
                                   counterOffset,
                                   vertexStride );
}
//=== VK_NVX_binary_import ===

// Create a CUDA module from a binary blob (raw-pointer variant): returns the
// raw Result; the created handle is written through pModule.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo,
                                                                         const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                         VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateCuModuleNVX( m_device,
                                                     reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ),
                                                     reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                     reinterpret_cast<VkCuModuleNVX *>( pModule ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the created CuModuleNVX by value; failure is
// reported via resultCheck (throws unless exceptions are disabled).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type
  Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
                             Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                             Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::CuModuleNVX module;
  VkResult result =
    d.vkCreateCuModuleNVX( m_device,
                           reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkCuModuleNVX *>( &module ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module );
}

# ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: wraps the new module in a UniqueHandle that destroys
// it via ObjectDestroy (device + allocator + dispatcher) on scope exit.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type
  Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo,
                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                   Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::CuModuleNVX module;
  VkResult result =
    d.vkCreateCuModuleNVX( m_device,
                           reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ),
                           reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                           reinterpret_cast<VkCuModuleNVX *>( &module ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Create a CUDA function handle from a previously created module
// (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo,
                                                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                           VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device,
                                                       reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the created CuFunctionNVX by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type
  Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                               Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
  VkResult result =
    d.vkCreateCuFunctionNVX( m_device,
                             reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuFunctionNVX *>( &function ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function );
}

# ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant of createCuFunctionNVX.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type
  Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo,
                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                     Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::CuFunctionNVX function;
  VkResult result =
    d.vkCreateCuFunctionNVX( m_device,
                             reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkCuFunctionNVX *>( &function ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Destroy a CUDA module handle (raw-pointer allocator variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                                   const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: Optional allocator converts to nullptr when absent.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                                   Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuModuleNVX( m_device,
                          static_cast<VkCuModuleNVX>( module ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Generic destroy overload for CuModuleNVX (used by UniqueHandle/ObjectDestroy).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode generic destroy overload for CuModuleNVX.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuModuleNVX( m_device,
                          static_cast<VkCuModuleNVX>( module ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Destroy a CUDA function handle (raw-pointer allocator variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant of destroyCuFunctionNVX.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuFunctionNVX( m_device,
                            static_cast<VkCuFunctionNVX>( function ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Generic destroy overload for CuFunctionNVX (used by UniqueHandle/ObjectDestroy).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode generic destroy overload for CuFunctionNVX.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyCuFunctionNVX( m_device,
                            static_cast<VkCuFunctionNVX>( function ),
                            reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Record a CUDA kernel launch into this command buffer (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of cuLaunchKernelNVX (reference variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_NVX_image_view_handle ===

// Query the 32-bit handle of an image view (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload (reference variant); same return value.
template <typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
  return result;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// Query device-address properties of an image view (raw-pointer variant).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView,
                                                                              VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: returns the properties struct by value; failure is
// reported via resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type
  Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
  VkResult result =
    d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_AMD_draw_indirect_count ===

// Indirect draw with draw count read from countBuffer at countBufferOffset,
// clamped to maxDrawCount; parameters are forwarded verbatim to the C API.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                            VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                            VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                            VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                            uint32_t maxDrawCount,
                                                            uint32_t stride,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDrawIndirectCountAMD( m_commandBuffer,
                               static_cast<VkBuffer>( buffer ),
                               static_cast<VkDeviceSize>( offset ),
                               static_cast<VkBuffer>( countBuffer ),
                               static_cast<VkDeviceSize>( countBufferOffset ),
                               maxDrawCount,
                               stride );
}

// Indexed counterpart of drawIndirectCountAMD.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                   VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                   VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                   uint32_t maxDrawCount,
                                                                   uint32_t stride,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer,
                                      static_cast<VkBuffer>( buffer ),
                                      static_cast<VkDeviceSize>( offset ),
                                      static_cast<VkBuffer>( countBuffer ),
                                      static_cast<VkDeviceSize>( countBufferOffset ),
                                      maxDrawCount,
                                      stride );
}
//=== VK_AMD_shader_info ===

// Query shader statistics/binary/disassembly for one pipeline stage
// (raw-pointer variant). Follows the standard Vulkan size-query protocol:
// with pInfo == nullptr the required size is written to *pInfoSize.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                        VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                                                                        VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                                                                        size_t * pInfoSize,
                                                                        void * pInfo,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetShaderInfoAMD( m_device,
                                                    static_cast<VkPipeline>( pipeline ),
                                                    static_cast<VkShaderStageFlagBits>( shaderStage ),
                                                    static_cast<VkShaderInfoTypeAMD>( infoType ),
                                                    pInfoSize,
                                                    pInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode variant: runs the two-call enumerate pattern (size query, then
// data fetch) in a loop that retries while the driver reports VK_INCOMPLETE
// (the size can grow between the two calls), then trims the vector to the
// final size before returning it.
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                            VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                            VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                            Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> info;
  size_t infoSize;
  VkResult result;
  do
  {
    // First call: query the required byte count only.
    result = d.vkGetShaderInfoAMD( m_device,
                                   static_cast<VkPipeline>( pipeline ),
                                   static_cast<VkShaderStageFlagBits>( shaderStage ),
                                   static_cast<VkShaderInfoTypeAMD>( infoType ),
                                   &infoSize,
                                   nullptr );
    if ( ( result == VK_SUCCESS ) && infoSize )
    {
      // Second call: fetch the data into the resized buffer.
      info.resize( infoSize );
      result = d.vkGetShaderInfoAMD( m_device,
                                     static_cast<VkPipeline>( pipeline ),
                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
                                     &infoSize,
                                     reinterpret_cast<void *>( info.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  VULKAN_HPP_ASSERT( infoSize <= info.size() );
  if ( infoSize < info.size() )
  {
    // The driver may have written fewer bytes than allocated — shrink to fit.
    info.resize( infoSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
}

// Allocator-aware overload: identical enumerate loop, but the returned vector
// is constructed with the caller-supplied allocator instance.
template <typename Uint8_tAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                            VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage,
                            VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType,
                            Uint8_tAllocator & uint8_tAllocator,
                            Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
  size_t infoSize;
  VkResult result;
  do
  {
    // First call: query the required byte count only.
    result = d.vkGetShaderInfoAMD( m_device,
                                   static_cast<VkPipeline>( pipeline ),
                                   static_cast<VkShaderStageFlagBits>( shaderStage ),
                                   static_cast<VkShaderInfoTypeAMD>( infoType ),
                                   &infoSize,
                                   nullptr );
    if ( ( result == VK_SUCCESS ) && infoSize )
    {
      // Second call: fetch the data into the resized buffer.
      info.resize( infoSize );
      result = d.vkGetShaderInfoAMD( m_device,
                                     static_cast<VkPipeline>( pipeline ),
                                     static_cast<VkShaderStageFlagBits>( shaderStage ),
                                     static_cast<VkShaderInfoTypeAMD>( infoType ),
                                     &infoSize,
                                     reinterpret_cast<void *>( info.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  VULKAN_HPP_ASSERT( infoSize <= info.size() );
  if ( infoSize < info.size() )
  {
    // The driver may have written fewer bytes than allocated — shrink to fit.
    info.resize( infoSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_KHR_dynamic_rendering ===

// Begin a dynamic rendering pass (raw-pointer variant). VideoSession-style
// wrapper: version assert, then forward to vkCmdBeginRenderingKHR.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload of beginRenderingKHR (reference variant).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

// End the current dynamic rendering pass; takes no parameters.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdEndRenderingKHR( m_commandBuffer );
}
  9233. #if defined( VK_USE_PLATFORM_GGP )
  9234. //=== VK_GGP_stream_descriptor_surface ===
  9235. template <typename Dispatch>
  9236. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  9237. Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo,
  9238. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  9239. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  9240. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9241. {
  9242. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9243. return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance,
  9244. reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ),
  9245. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  9246. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  9247. }
  9248. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>         allocator,
                                              Dispatch const &                                                  d ) const
{
  // Enhanced-mode overload: returns the created SurfaceKHR wrapped in ResultValueType.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  // Optional<AllocationCallbacks> converts to a plain pointer (nullptr when no allocator was given).
  VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
    m_instance,
    reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  // resultCheck validates the VkResult against the named call site before the value is returned.
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
}
  9265. # ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>         allocator,
                                                    Dispatch const &                                                  d ) const
{
  // Same as createStreamDescriptorSurfaceGGP, but wraps the result in a UniqueHandle
  // whose deleter (ObjectDestroy) captures this instance, the allocator and the dispatcher.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  VkResult result = d.vkCreateStreamDescriptorSurfaceGGP(
    m_instance,
    reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
}
  9284. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  9285. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9286. #endif /*VK_USE_PLATFORM_GGP*/
  9287. //=== VK_NV_external_memory_capabilities ===
  9288. template <typename Dispatch>
  9289. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  9290. PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format,
  9291. VULKAN_HPP_NAMESPACE::ImageType type,
  9292. VULKAN_HPP_NAMESPACE::ImageTiling tiling,
  9293. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage,
  9294. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags,
  9295. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
  9296. VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties,
  9297. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9298. {
  9299. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9300. return static_cast<Result>(
  9301. d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
  9302. static_cast<VkFormat>( format ),
  9303. static_cast<VkImageType>( type ),
  9304. static_cast<VkImageTiling>( tiling ),
  9305. static_cast<VkImageUsageFlags>( usage ),
  9306. static_cast<VkImageCreateFlags>( flags ),
  9307. static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
  9308. reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  9309. }
  9310. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type
  PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format                          format,
                                                      VULKAN_HPP_NAMESPACE::ImageType                       type,
                                                      VULKAN_HPP_NAMESPACE::ImageTiling                     tiling,
                                                      VULKAN_HPP_NAMESPACE::ImageUsageFlags                 usage,
                                                      VULKAN_HPP_NAMESPACE::ImageCreateFlags                flags,
                                                      VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType,
                                                      Dispatch const &                                      d ) const
{
  // Enhanced-mode overload: returns the queried properties by value instead of via out-pointer.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
  VkResult result =
    d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice,
                                                          static_cast<VkFormat>( format ),
                                                          static_cast<VkImageType>( type ),
                                                          static_cast<VkImageTiling>( tiling ),
                                                          static_cast<VkImageUsageFlags>( usage ),
                                                          static_cast<VkImageCreateFlags>( flags ),
                                                          static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ),
                                                          reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties );
}
  9335. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9336. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  9337. //=== VK_NV_external_memory_win32 ===
  9338. template <typename Dispatch>
  9339. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory,
  9340. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType,
  9341. HANDLE * pHandle,
  9342. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9343. {
  9344. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9345. return static_cast<Result>(
  9346. d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
  9347. }
  9348. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9349. template <typename Dispatch>
  9350. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV(
  9351. VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
  9352. {
  9353. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9354. HANDLE handle;
  9355. VkResult result =
  9356. d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle );
  9357. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
  9358. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  9359. }
  9360. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9361. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  9362. //=== VK_KHR_get_physical_device_properties2 ===
  9363. template <typename Dispatch>
  9364. VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures,
  9365. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9366. {
  9367. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9368. d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  9369. }
  9370. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2
  PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Enhanced-mode overload: returns a fresh PhysicalDeviceFeatures2 by value (no pNext chain).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
  return features;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // StructureChain overload: queries into the chain's PhysicalDeviceFeatures2 link so that
  // extension structures chained behind it are filled in the same call.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
  d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
  return structureChain;
}
  9390. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9391. template <typename Dispatch>
  9392. VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties,
  9393. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9394. {
  9395. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9396. d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
  9397. }
  9398. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2
  PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Enhanced-mode overload: returns a fresh PhysicalDeviceProperties2 by value (no pNext chain).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  return properties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // StructureChain overload: queries into the chain's PhysicalDeviceProperties2 link so chained
  // extension structures are populated in the same call.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
  d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  return structureChain;
}
  9418. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9419. template <typename Dispatch>
  9420. VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format,
  9421. VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties,
  9422. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9423. {
  9424. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9425. d.vkGetPhysicalDeviceFormatProperties2KHR(
  9426. m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
  9427. }
  9428. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2
  PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Enhanced-mode overload: returns the format properties by value (no pNext chain).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
  d.vkGetPhysicalDeviceFormatProperties2KHR(
    m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
  return formatProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // StructureChain overload: queries into the chain's FormatProperties2 link so chained
  // extension structures are populated in the same call.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
  d.vkGetPhysicalDeviceFormatProperties2KHR(
    m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
  return structureChain;
}
  9450. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9451. template <typename Dispatch>
  9452. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  9453. PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo,
  9454. VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties,
  9455. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9456. {
  9457. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9458. return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
  9459. reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ),
  9460. reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  9461. }
  9462. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type
  PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
  // Enhanced-mode overload: returns the queried properties by value, with the VkResult checked.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                    reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
  PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
{
  // StructureChain overload: queries into the chain's ImageFormatProperties2 link so chained
  // extension structures are populated in the same call; returns the whole chain.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
  VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ),
                                                                    reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
}
  9488. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9489. template <typename Dispatch>
  9490. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount,
  9491. VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties,
  9492. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9493. {
  9494. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9495. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
  9496. m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  9497. }
  9498. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename QueueFamilyProperties2Allocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
  // Enhanced-mode overload: two-call idiom — first query the count, then fill the vector.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  // The count may only shrink between the two calls; trim the vector if it did.
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    queueFamilyProperties.resize( queueFamilyPropertyCount );
  }
  return queueFamilyProperties;
}
template <typename QueueFamilyProperties2Allocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
{
  // Same as the allocator-less overload, but the result vector is constructed with the
  // caller-supplied allocator (SFINAE restricts B1 to vectors of QueueFamilyProperties2).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
  uint32_t queueFamilyPropertyCount;
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  // The count may only shrink between the two calls; trim the vector if it did.
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    queueFamilyProperties.resize( queueFamilyPropertyCount );
  }
  return queueFamilyProperties;
}
template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
{
  // StructureChain overload: one chain per queue family. A scratch vector of plain
  // QueueFamilyProperties2 is used for the C call; each element borrows the pNext pointer of the
  // corresponding chain so extension structures inside the chains get filled, then the base
  // structures are copied back into the chains.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains;
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  structureChains.resize( queueFamilyPropertyCount );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    // Re-link the scratch element into chain i's extension structures.
    queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  }
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    structureChains.resize( queueFamilyPropertyCount );
  }
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    // Copy the filled base structure back into its chain.
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  }
  return structureChains;
}
template <typename StructureChain,
          typename StructureChainAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator>
  PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
{
  // Allocator-taking variant of the StructureChain overload: identical two-call / pNext re-link /
  // copy-back scheme, but the chain vector is constructed with the caller-supplied allocator.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
  uint32_t queueFamilyPropertyCount;
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  structureChains.resize( queueFamilyPropertyCount );
  queueFamilyProperties.resize( queueFamilyPropertyCount );
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    // Re-link the scratch element into chain i's extension structures.
    queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  }
  d.vkGetPhysicalDeviceQueueFamilyProperties2KHR(
    m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
  {
    structureChains.resize( queueFamilyPropertyCount );
  }
  for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  {
    // Copy the filled base structure back into its chain.
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  }
  return structureChains;
}
  9598. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9599. template <typename Dispatch>
  9600. VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties,
  9601. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9602. {
  9603. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9604. d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  9605. }
  9606. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2
  PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Enhanced-mode overload: returns the memory properties by value (no pNext chain).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
  d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  return memoryProperties;
}
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // StructureChain overload: queries into the chain's PhysicalDeviceMemoryProperties2 link so
  // chained extension structures are populated in the same call.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties =
    structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
  d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
  return structureChain;
}
  9627. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9628. template <typename Dispatch>
  9629. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo,
  9630. uint32_t * pPropertyCount,
  9631. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties,
  9632. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9633. {
  9634. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9635. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
  9636. reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ),
  9637. pPropertyCount,
  9638. reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
  9639. }
  9640. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                      Dispatch const &                                                   d ) const
{
  // Enhanced-mode overload: two-call idiom — query the count, then fill the vector.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
  uint32_t propertyCount;
  d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
    m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  properties.resize( propertyCount );
  d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                        &propertyCount,
                                                        reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  // The count may only shrink between the two calls; trim the vector if it did.
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return properties;
}
template <typename SparseImageFormatProperties2Allocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator>
  PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo,
                                                      SparseImageFormatProperties2Allocator &                            sparseImageFormatProperties2Allocator,
                                                      Dispatch const &                                                   d ) const
{
  // Same as the allocator-less overload, but the result vector is constructed with the
  // caller-supplied allocator (SFINAE restricts B1 to vectors of SparseImageFormatProperties2).
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
  uint32_t propertyCount;
  d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
    m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  properties.resize( propertyCount );
  d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ),
                                                        &propertyCount,
                                                        reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  // The count may only shrink between the two calls; trim the vector if it did.
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return properties;
}
  9689. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  9690. //=== VK_KHR_device_group ===
  9691. template <typename Dispatch>
  9692. VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex,
  9693. uint32_t localDeviceIndex,
  9694. uint32_t remoteDeviceIndex,
  9695. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures,
  9696. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9697. {
  9698. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9699. d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
  9700. m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  9701. }
  9702. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR(
  uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Enhanced-mode overload: returns the peer-memory feature flags by value.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  d.vkGetDeviceGroupPeerMemoryFeaturesKHR(
    m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  return peerMemoryFeatures;
}
  9713. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Records vkCmdSetDeviceMaskKHR into this command buffer, forwarding the
  // device mask unchanged through the dispatcher d.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  }
  // Records vkCmdDispatchBaseKHR: a compute dispatch with an explicit base
  // workgroup offset (baseGroupX/Y/Z) and groupCountX/Y/Z workgroups.
  // All six values are passed straight through to the C entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t         baseGroupX,
                                                         uint32_t         baseGroupY,
                                                         uint32_t         baseGroupZ,
                                                         uint32_t         groupCountX,
                                                         uint32_t         groupCountY,
                                                         uint32_t         groupCountZ,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  }
#if defined( VK_USE_PLATFORM_VI_NN )
  //=== VK_NN_vi_surface ===

  // Pointer-parameter overload: creates a VI surface, writing the new handle
  // through pSurface and returning the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo,
                                                                             const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                             VULKAN_HPP_NAMESPACE::SurfaceKHR *                  pSurface,
                                                                             Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance,
                                                       reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ),
                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                       reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the create-info by reference and an optional
  // allocator, checks the result (throws/returns per build configuration) and
  // returns the surface by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN &      createInfo,
                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                 Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result =
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same as above but wraps the surface in a
  // UniqueHandle that destroys it (with the same allocator) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN &      createInfo,
                                       Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                       Dispatch const &                                          d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult                         result =
      d.vkCreateViSurfaceNN( m_instance,
                             reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ),
                             reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                             reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_VI_NN*/
  //=== VK_KHR_maintenance1 ===

  // Forwards to vkTrimCommandPoolKHR, converting the C++ handle and flags to
  // their C counterparts.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool          commandPool,
                                                     VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags,
                                                     Dispatch const &                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  }
  //=== VK_KHR_device_group_creation ===

  // Pointer-parameter overload: the caller drives the count/array query
  // protocol directly; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t *                                           pPhysicalDeviceGroupCount,
                                                VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties,
                                                Dispatch const &                                      d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR(
      m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the group properties as a vector, implementing
  // the two-call idiom (query count, then fill) with a retry loop.
  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
    uint32_t                                                                                                 physicalDeviceGroupCount;
    VkResult                                                                                                 result;
    do
    {
      // Query the count first; if groups exist, size the vector and fetch.
      // VK_INCOMPLETE means the count changed between the two calls, so retry.
      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    // Shrink if the final count came back smaller than the allocation.
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }

  // Allocator-aware variant of the overload above; identical logic, but the
  // result vector is constructed from the caller-supplied allocator.
  template <typename PhysicalDeviceGroupPropertiesAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type
    Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties(
      physicalDeviceGroupPropertiesAllocator );
    uint32_t physicalDeviceGroupCount;
    VkResult result;
    do
    {
      result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr );
      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
      {
        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
        result = d.vkEnumeratePhysicalDeviceGroupsKHR(
          m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" );
    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
    {
      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_KHR_external_memory_capabilities ===

  // Pointer-parameter overload: fills *pExternalBufferProperties for the given
  // external-buffer query info via vkGetPhysicalDeviceExternalBufferPropertiesKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo,
                                                                         VULKAN_HPP_NAMESPACE::ExternalBufferProperties *               pExternalBufferProperties,
                                                                         Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
                                                      reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ),
                                                      reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
  9876. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9877. template <typename Dispatch>
  9878. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties
  9879. PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo,
  9880. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  9881. {
  9882. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9883. VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
  9884. d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice,
  9885. reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ),
  9886. reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
  9887. return externalBufferProperties;
  9888. }
  9889. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_memory_win32 ===

  // Pointer-parameter overload: retrieves the Win32 HANDLE for an exportable
  // memory object, writing it through pHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                                                                 HANDLE *                                                  pHandle,
                                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the HANDLE by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    HANDLE   handle;
    VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Pointer-parameter overload: queries the memory properties of an externally
  // created Win32 handle of the given type.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               HANDLE                                                 handle,
                                               VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties,
                                               Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                       handle,
                                                                       reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value after result
  // checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type
    Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
    VkResult                                             result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device,
                                                                                                         static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                                                         handle,
                                                                                                         reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  //=== VK_KHR_external_memory_fd ===

  // Pointer-parameter overload: retrieves a POSIX file descriptor for an
  // exportable memory object, writing it through pFd.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo,
                                                                        int *                                            pFd,
                                                                        Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }
  9951. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9952. template <typename Dispatch>
  9953. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo,
  9954. Dispatch const & d ) const
  9955. {
  9956. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9957. int fd;
  9958. VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd );
  9959. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  9960. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  9961. }
  9962. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Pointer-parameter overload: queries the memory properties of an externally
  // created file descriptor of the given handle type.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                                                  int                                                    fd,
                                                                                  VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR *          pMemoryFdProperties,
                                                                                  Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR(
      m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  }
  9973. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9974. template <typename Dispatch>
  9975. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type
  9976. Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
  9977. {
  9978. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  9979. VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
  9980. VkResult result = d.vkGetMemoryFdPropertiesKHR(
  9981. m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) );
  9982. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
  9983. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties );
  9984. }
  9985. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_KHR_external_semaphore_capabilities ===

  // Pointer-parameter overload: fills *pExternalSemaphoreProperties for the
  // given external-semaphore query info.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo,
                                                       VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties *               pExternalSemaphoreProperties,
                                                       Dispatch const &                                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
                                                         reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ),
                                                         reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
  9998. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  9999. template <typename Dispatch>
  10000. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties
  10001. PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo,
  10002. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10003. {
  10004. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10005. VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
  10006. d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice,
  10007. reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ),
  10008. reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
  10009. return externalSemaphoreProperties;
  10010. }
  10011. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#if defined( VK_USE_PLATFORM_WIN32_KHR )
  //=== VK_KHR_external_semaphore_win32 ===

  // Pointer-parameter overload: imports a Win32 handle into a semaphore.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-based variant that checks the result; the
  // void return carries only success/failure.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo,
                                           Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result =
      d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Pointer-parameter overload: exports a semaphore as a Win32 handle written
  // through pHandle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }

# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the HANDLE by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    HANDLE   handle;
    VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
  //=== VK_KHR_external_semaphore_fd ===

  // Pointer-parameter overload: imports a POSIX file descriptor into a
  // semaphore as described by *pImportSemaphoreFdInfo.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo,
                                                                              Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  }
  10064. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10065. template <typename Dispatch>
  10066. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  10067. Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  10068. {
  10069. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10070. VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) );
  10071. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
  10072. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10073. }
  10074. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Pointer-parameter overload: exports a semaphore as a POSIX file descriptor
  // written through pFd.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo,
                                                                           int *                                               pFd,
                                                                           Dispatch const &                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }
  10083. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10084. template <typename Dispatch>
  10085. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type
  10086. Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  10087. {
  10088. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10089. int fd;
  10090. VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd );
  10091. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  10092. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  10093. }
  10094. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_KHR_push_descriptor ===

  // Pointer-parameter overload: records a push of descriptorWriteCount
  // descriptor writes to set `set` of `layout` at the given bind point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint          pipelineBindPoint,
                                                              VULKAN_HPP_NAMESPACE::PipelineLayout             layout,
                                                              uint32_t                                         set,
                                                              uint32_t                                         descriptorWriteCount,
                                                              const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites,
                                                              Dispatch const &                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWriteCount,
                                 reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same call, but the writes are supplied as an ArrayProxy
  // which provides the count and data pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint                                              pipelineBindPoint,
                                         VULKAN_HPP_NAMESPACE::PipelineLayout                                                 layout,
                                         uint32_t                                                                             set,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites,
                                         Dispatch const &                                                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetKHR( m_commandBuffer,
                                 static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
                                 static_cast<VkPipelineLayout>( layout ),
                                 set,
                                 descriptorWrites.size(),
                                 reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw-pointer overload: records a templated descriptor push; pData is an
  // opaque blob interpreted according to descriptorUpdateTemplate.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
                                                                          uint32_t                                       set,
                                                                          const void *                                   pData,
                                                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetWithTemplateKHR(
      m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Typed overload: passes the address of a caller-provided value as the
  // template data blob. The caller must ensure DataType's layout matches what
  // the update template expects — the cast itself cannot check this.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
                                                                          VULKAN_HPP_NAMESPACE::PipelineLayout           layout,
                                                                          uint32_t                                       set,
                                                                          DataType const &                               data,
                                                                          Dispatch const &                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer,
                                             static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
                                             static_cast<VkPipelineLayout>( layout ),
                                             set,
                                             reinterpret_cast<const void *>( &data ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  //=== VK_EXT_conditional_rendering ===

  // Pointer-parameter overload: records the start of a conditional rendering
  // region described by *pConditionalRenderingBegin.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-based variant of the call above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin,
                                                                      Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */

  // Records the end of the current conditional rendering region.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  }
  //=== VK_KHR_descriptor_update_template ===

  // Pointer-parameter overload: creates a descriptor update template, writing
  // the new handle through pDescriptorUpdateTemplate.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
                                               VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate *                 pDescriptorUpdateTemplate,
                                               Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device,
                                                                       reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ),
                                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                       reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
  }

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the new handle by value after result checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type
    Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                               Dispatch const &                                                  d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult                                       result = d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
  }

# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: wraps the new template in a UniqueHandle that
  // destroys it (with the same allocator) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type
    Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo,
                                                     Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                     Dispatch const &                                                  d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
    VkResult                                       result = d.vkCreateDescriptorUpdateTemplateKHR(
      m_device,
      reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                  UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>(
                                    descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10232. template <typename Dispatch>
  10233. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10234. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10235. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10236. {
  10237. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10238. d.vkDestroyDescriptorUpdateTemplateKHR(
  10239. m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  10240. }
  10241. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10242. template <typename Dispatch>
  10243. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10244. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10245. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10246. {
  10247. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10248. d.vkDestroyDescriptorUpdateTemplateKHR(
  10249. m_device,
  10250. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  10251. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  10252. }
  10253. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10254. template <typename Dispatch>
  10255. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  10256. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10257. const void * pData,
  10258. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10259. {
  10260. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10261. d.vkUpdateDescriptorSetWithTemplateKHR(
  10262. m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  10263. }
  10264. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10265. template <typename DataType, typename Dispatch>
  10266. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  10267. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate,
  10268. DataType const & data,
  10269. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10270. {
  10271. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10272. d.vkUpdateDescriptorSetWithTemplateKHR( m_device,
  10273. static_cast<VkDescriptorSet>( descriptorSet ),
  10274. static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ),
  10275. reinterpret_cast<const void *>( &data ) );
  10276. }
  10277. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10278. //=== VK_NV_clip_space_w_scaling ===
  10279. template <typename Dispatch>
  10280. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
  10281. uint32_t viewportCount,
  10282. const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings,
  10283. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10284. {
  10285. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10286. d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
  10287. }
  10288. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10289. template <typename Dispatch>
  10290. VULKAN_HPP_INLINE void
  10291. CommandBuffer::setViewportWScalingNV( uint32_t firstViewport,
  10292. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings,
  10293. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10294. {
  10295. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10296. d.vkCmdSetViewportWScalingNV(
  10297. m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  10298. }
  10299. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10300. //=== VK_EXT_direct_mode_display ===
  10301. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10302. template <typename Dispatch>
  10303. VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10304. {
  10305. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10306. return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  10307. }
  10308. #else
  10309. template <typename Dispatch>
  10310. VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10311. {
  10312. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10313. d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
  10314. }
  10315. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10316. #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
  10317. //=== VK_EXT_acquire_xlib_display ===
  10318. template <typename Dispatch>
  10319. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy,
  10320. VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10321. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10322. {
  10323. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10324. return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
  10325. }
  10326. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10327. template <typename Dispatch>
  10328. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  10329. PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  10330. {
  10331. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10332. VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) );
  10333. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
  10334. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10335. }
  10336. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10337. template <typename Dispatch>
  10338. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy,
  10339. RROutput rrOutput,
  10340. VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
  10341. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10342. {
  10343. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10344. return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  10345. }
  10346. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10347. template <typename Dispatch>
  10348. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
  10349. PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  10350. {
  10351. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10352. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  10353. VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
  10354. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
  10355. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  10356. }
  10357. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  10358. template <typename Dispatch>
  10359. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
  10360. PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  10361. {
  10362. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10363. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  10364. VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) );
  10365. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" );
  10366. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10367. UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  10368. }
  10369. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  10370. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10371. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  10372. //=== VK_EXT_display_surface_counter ===
  10373. template <typename Dispatch>
  10374. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10375. PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface,
  10376. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities,
  10377. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10378. {
  10379. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10380. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
  10381. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
  10382. }
  10383. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10384. template <typename Dispatch>
  10385. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type
  10386. PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  10387. {
  10388. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10389. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
  10390. VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT(
  10391. m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) );
  10392. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
  10393. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  10394. }
  10395. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10396. //=== VK_EXT_display_control ===
  10397. template <typename Dispatch>
  10398. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10399. const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo,
  10400. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10401. {
  10402. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10403. return static_cast<Result>(
  10404. d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  10405. }
  10406. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10407. template <typename Dispatch>
  10408. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10409. const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo,
  10410. Dispatch const & d ) const
  10411. {
  10412. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10413. VkResult result =
  10414. d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) );
  10415. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  10416. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  10417. }
  10418. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10419. template <typename Dispatch>
  10420. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo,
  10421. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10422. VULKAN_HPP_NAMESPACE::Fence * pFence,
  10423. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10424. {
  10425. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10426. return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device,
  10427. reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ),
  10428. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10429. reinterpret_cast<VkFence *>( pFence ) ) );
  10430. }
  10431. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10432. template <typename Dispatch>
  10433. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
  10434. Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
  10435. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10436. Dispatch const & d ) const
  10437. {
  10438. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10439. VULKAN_HPP_NAMESPACE::Fence fence;
  10440. VkResult result = d.vkRegisterDeviceEventEXT(
  10441. m_device,
  10442. reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
  10443. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10444. reinterpret_cast<VkFence *>( &fence ) );
  10445. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
  10446. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  10447. }
  10448. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  10449. template <typename Dispatch>
  10450. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
  10451. Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo,
  10452. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10453. Dispatch const & d ) const
  10454. {
  10455. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10456. VULKAN_HPP_NAMESPACE::Fence fence;
  10457. VkResult result = d.vkRegisterDeviceEventEXT(
  10458. m_device,
  10459. reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ),
  10460. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10461. reinterpret_cast<VkFence *>( &fence ) );
  10462. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" );
  10463. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10464. UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  10465. }
  10466. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  10467. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10468. template <typename Dispatch>
  10469. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10470. const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo,
  10471. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  10472. VULKAN_HPP_NAMESPACE::Fence * pFence,
  10473. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10474. {
  10475. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10476. return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device,
  10477. static_cast<VkDisplayKHR>( display ),
  10478. reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ),
  10479. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  10480. reinterpret_cast<VkFence *>( pFence ) ) );
  10481. }
  10482. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10483. template <typename Dispatch>
  10484. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type
  10485. Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10486. const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
  10487. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10488. Dispatch const & d ) const
  10489. {
  10490. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10491. VULKAN_HPP_NAMESPACE::Fence fence;
  10492. VkResult result = d.vkRegisterDisplayEventEXT(
  10493. m_device,
  10494. static_cast<VkDisplayKHR>( display ),
  10495. reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
  10496. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10497. reinterpret_cast<VkFence *>( &fence ) );
  10498. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
  10499. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
  10500. }
  10501. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  10502. template <typename Dispatch>
  10503. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type
  10504. Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  10505. const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo,
  10506. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  10507. Dispatch const & d ) const
  10508. {
  10509. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10510. VULKAN_HPP_NAMESPACE::Fence fence;
  10511. VkResult result = d.vkRegisterDisplayEventEXT(
  10512. m_device,
  10513. static_cast<VkDisplayKHR>( display ),
  10514. reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ),
  10515. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  10516. reinterpret_cast<VkFence *>( &fence ) );
  10517. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" );
  10518. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  10519. UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  10520. }
  10521. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  10522. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10523. template <typename Dispatch>
  10524. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10525. VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter,
  10526. uint64_t * pCounterValue,
  10527. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10528. {
  10529. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10530. return static_cast<Result>(
  10531. d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
  10532. }
  10533. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10534. template <typename Dispatch>
  10535. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT(
  10536. VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
  10537. {
  10538. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10539. uint64_t counterValue;
  10540. VkResult result =
  10541. d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue );
  10542. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
  10543. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue );
  10544. }
  10545. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10546. //=== VK_GOOGLE_display_timing ===
  10547. template <typename Dispatch>
  10548. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10549. Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10550. VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties,
  10551. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10552. {
  10553. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10554. return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE(
  10555. m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
  10556. }
  10557. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10558. template <typename Dispatch>
  10559. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type
  10560. Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  10561. {
  10562. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10563. VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
  10564. VkResult result = d.vkGetRefreshCycleDurationGOOGLE(
  10565. m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) );
  10566. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
  10567. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties );
  10568. }
  10569. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10570. template <typename Dispatch>
  10571. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  10572. Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  10573. uint32_t * pPresentationTimingCount,
  10574. VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings,
  10575. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10576. {
  10577. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10578. return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device,
  10579. static_cast<VkSwapchainKHR>( swapchain ),
  10580. pPresentationTimingCount,
  10581. reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
  10582. }
  10583. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: fetches all past-presentation timing records for the swapchain
  // using the standard Vulkan two-call idiom (query count, then fill), retrying while the
  // implementation reports VK_INCOMPLETE because the count can change between the two calls.
  template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
    uint32_t presentationTimingCount;
    VkResult result;
    do
    {
      // First call: query how many timing records are available.
      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
      {
        // Second call: retrieve the records into a buffer sized from the first call.
        presentationTimings.resize( presentationTimingCount );
        result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                      static_cast<VkSwapchainKHR>( swapchain ),
                                                      &presentationTimingCount,
                                                      reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
    // Fewer records may have been written than were allocated for; trim the excess.
    if ( presentationTimingCount < presentationTimings.size() )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
  }
  // Allocator-aware overload of the enumeration above: identical two-call/retry logic, but the
  // result vector is constructed with the caller-supplied allocator. Enabled only when B1's
  // value_type is PastPresentationTimingGOOGLE (SFINAE constraint declared at the prototype).
  template <typename PastPresentationTimingGOOGLEAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type
    Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR     swapchain,
                                             PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator,
                                             Dispatch const &                        d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings(
      pastPresentationTimingGOOGLEAllocator );
    uint32_t presentationTimingCount;
    VkResult result;
    do
    {
      // First call: query how many timing records are available.
      result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr );
      if ( ( result == VK_SUCCESS ) && presentationTimingCount )
      {
        // Second call: retrieve the records into a buffer sized from the first call.
        presentationTimings.resize( presentationTimingCount );
        result = d.vkGetPastPresentationTimingGOOGLE( m_device,
                                                      static_cast<VkSwapchainKHR>( swapchain ),
                                                      &presentationTimingCount,
                                                      reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" );
    VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
    // Fewer records may have been written than were allocated for; trim the excess.
    if ( presentationTimingCount < presentationTimings.size() )
    {
      presentationTimings.resize( presentationTimingCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings );
  }
  10648. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10649. //=== VK_EXT_discard_rectangles ===
  10650. template <typename Dispatch>
  10651. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
  10652. uint32_t discardRectangleCount,
  10653. const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles,
  10654. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10655. {
  10656. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10657. d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  10658. }
  10659. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  10660. template <typename Dispatch>
  10661. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle,
  10662. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles,
  10663. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10664. {
  10665. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10666. d.vkCmdSetDiscardRectangleEXT(
  10667. m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  10668. }
  10669. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10670. template <typename Dispatch>
  10671. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 discardRectangleEnable,
  10672. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10673. {
  10674. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10675. d.vkCmdSetDiscardRectangleEnableEXT( m_commandBuffer, static_cast<VkBool32>( discardRectangleEnable ) );
  10676. }
  10677. template <typename Dispatch>
  10678. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleModeEXT( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode,
  10679. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  10680. {
  10681. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  10682. d.vkCmdSetDiscardRectangleModeEXT( m_commandBuffer, static_cast<VkDiscardRectangleModeEXT>( discardRectangleMode ) );
  10683. }
  10684. //=== VK_EXT_hdr_metadata ===
  // VK_EXT_hdr_metadata, raw-pointer overload: sets HDR metadata for swapchainCount swapchains.
  // pSwapchains and pMetadata are parallel arrays; caller guarantees both hold swapchainCount entries.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t                                     swapchainCount,
                                                    const VULKAN_HPP_NAMESPACE::SwapchainKHR *   pSwapchains,
                                                    const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // C++ wrapper types are layout-compatible with the C handles/structs, so a reinterpret_cast suffices.
    d.vkSetHdrMetadataEXT(
      m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  }
  10695. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_EXT_hdr_metadata, enhanced overload: sets HDR metadata for a set of swapchains.
  // The two proxies are parallel arrays; a size mismatch is a LogicError (or an assert when
  // exceptions are disabled), hence noexcept only in the no-exceptions configuration.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const &   swapchains,
                                                    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
#  else
    if ( swapchains.size() != metadata.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
    }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkSetHdrMetadataEXT( m_device,
                           swapchains.size(),
                           reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ),
                           reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  }
  10715. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10716. //=== VK_KHR_create_renderpass2 ===
  // VK_KHR_create_renderpass2, raw-pointer overload: creates a render pass from a
  // RenderPassCreateInfo2 and returns the bare VkResult; no exception translation is performed.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo,
                                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks *   pAllocator,
                                                                              VULKAN_HPP_NAMESPACE::RenderPass *                  pRenderPass,
                                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device,
                                                          reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ),
                                                          reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                          reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
  }
  10729. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_create_renderpass2, enhanced overload: creates a render pass and returns the handle
  // wrapped in ResultValueType; resultCheck throws (or asserts) on a non-success VkResult.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type
    Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    // Optional<> yields a null pointer when no allocator was supplied, matching the C API contract.
    VkResult result =
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
  }
  10746. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // VK_KHR_create_renderpass2, unique-handle overload: same as createRenderPass2KHR but returns a
  // UniqueHandle whose deleter destroys the render pass on this device with the given allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type
    Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
    VkResult result =
      d.vkCreateRenderPass2KHR( m_device,
                                reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ),
                                reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                                reinterpret_cast<VkRenderPass *>( &renderPass ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" );
    // Ownership passes to the UniqueHandle; ObjectDestroy captures device, allocator, and dispatcher.
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  10765. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  10766. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_create_renderpass2, raw-pointer overload: records vkCmdBeginRenderPass2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin,
                                                             const VULKAN_HPP_NAMESPACE::SubpassBeginInfo *    pSubpassBeginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2KHR(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  }
  10776. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_create_renderpass2, reference overload: records vkCmdBeginRenderPass2KHR, taking the
  // begin-info structs by reference instead of pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin,
                                                             const VULKAN_HPP_NAMESPACE::SubpassBeginInfo &    subpassBeginInfo,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginRenderPass2KHR(
      m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  }
  10786. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_create_renderpass2, raw-pointer overload: records vkCmdNextSubpass2KHR to advance to
  // the next subpass of the current render pass.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo,
                                                         const VULKAN_HPP_NAMESPACE::SubpassEndInfo *   pSubpassEndInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass2KHR(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }
  10796. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_create_renderpass2, reference overload of nextSubpass2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo,
                                                         const VULKAN_HPP_NAMESPACE::SubpassEndInfo &   subpassEndInfo,
                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdNextSubpass2KHR(
      m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
  10806. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_create_renderpass2, raw-pointer overload: records vkCmdEndRenderPass2KHR to end the
  // current render pass instance.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  }
  10814. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_create_renderpass2, reference overload of endRenderPass2KHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  }
  10822. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10823. //=== VK_KHR_shared_presentable_image ===
  10824. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_shared_presentable_image (enhanced mode disabled): queries the shared-presentable
  // swapchain status and returns the raw Result without any checking.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  }
  10832. #else
  // VK_KHR_shared_presentable_image (enhanced mode): queries the swapchain status.
  // Both eSuccess and eSuboptimalKHR are accepted as non-error outcomes and returned to the
  // caller; any other result is routed through resultCheck's error handling.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
                                                                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
  10844. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  10845. //=== VK_KHR_external_fence_capabilities ===
  // VK_KHR_external_fence_capabilities, raw-pointer overload: queries external-fence handle-type
  // properties into the caller-supplied output struct.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo,
                                                                        VULKAN_HPP_NAMESPACE::ExternalFenceProperties *               pExternalFenceProperties,
                                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ),
                                                     reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }
  10856. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_external_fence_capabilities, enhanced overload: returns the queried
  // ExternalFenceProperties by value (the underlying query cannot fail, hence noexcept).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties
    PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice,
                                                     reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ),
                                                     reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
  10869. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10870. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  10871. //=== VK_KHR_external_fence_win32 ===
  // VK_KHR_external_fence_win32, raw-pointer overload: imports a Win32 handle into a fence and
  // returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR(
    const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  }
  10880. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_external_fence_win32, enhanced overload: imports a Win32 handle into a fence;
  // a non-success result is reported through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  10890. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_external_fence_win32, raw-pointer overload: exports a fence payload as a Win32 HANDLE
  // written to *pHandle; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo,
                                                                                HANDLE *         pHandle,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  }
  10900. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_external_fence_win32, enhanced overload: exports a fence payload and returns the
  // Win32 HANDLE wrapped in ResultValueType; non-success results go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type
    Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    HANDLE handle;
    VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle );
  }
  10911. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10912. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  10913. //=== VK_KHR_external_fence_fd ===
  // VK_KHR_external_fence_fd, raw-pointer overload: imports a POSIX file descriptor into a fence
  // and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  }
  10921. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_external_fence_fd, enhanced overload: imports a POSIX fd into a fence; non-success
  // results are reported through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  10931. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_external_fence_fd, raw-pointer overload: exports a fence payload as a POSIX fd
  // written to *pFd; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo,
                                                                       int *                                           pFd,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  }
  10940. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_external_fence_fd, enhanced overload: exports a fence payload and returns the fd
  // wrapped in ResultValueType; non-success results go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo,
                                                                                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    int fd;
    VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd );
  }
  10951. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  10952. //=== VK_KHR_performance_query ===
  // VK_KHR_performance_query, raw-pointer overload: enumerates the performance counters (and
  // their descriptions) available for a queue family. Follows the standard Vulkan two-call
  // idiom: pass null output arrays to query the count, then call again to fill them.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t                                                 queueFamilyIndex,
                                                                     uint32_t *                                               pCounterCount,
                                                                     VULKAN_HPP_NAMESPACE::PerformanceCounterKHR *            pCounters,
                                                                     VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice,
                                                                         queueFamilyIndex,
                                                                         pCounterCount,
                                                                         reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ),
                                                                         reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  }
  10969. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_performance_query, enhanced overload: returns the queue family's performance counters
  // and their descriptions as a pair of vectors. Implements the two-call enumeration idiom,
  // retrying while the driver reports VK_INCOMPLETE (counter set changed between the count query
  // and the fill), and shrinks the vectors if the final count came back smaller.
  template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_;
    // Aliases into the pair so both vectors can be filled by the same enumeration loop.
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters            = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      // First call: query the number of counters only.
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        // Second call: fill both parallel arrays.
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      // Trim if the driver reported fewer counters on the fill call than on the count call.
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
  // VK_KHR_performance_query, allocator-aware overload: identical enumeration logic to the
  // overload above, but constructs the two result vectors from caller-supplied allocators
  // (B1/B2 enable_if constraints ensure the allocators match the element types).
  template <typename PerformanceCounterKHRAllocator,
            typename PerformanceCounterDescriptionKHRAllocator,
            typename Dispatch,
            typename B1,
            typename B2,
            typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value &&
                                      std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value,
                                    int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
                                       std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type
    PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t                                    queueFamilyIndex,
                                                                     PerformanceCounterKHRAllocator &            performanceCounterKHRAllocator,
                                                                     PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator,
                                                                     Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // piecewise_construct forwards each allocator to the matching vector of the pair.
    std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>,
              std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>
      data_(
        std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> &                       counters            = data_.first;
    std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data_.second;
    uint32_t counterCount;
    VkResult result;
    do
    {
      // Two-call idiom: count first, then fill; retry on VK_INCOMPLETE.
      result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr );
      if ( ( result == VK_SUCCESS ) && counterCount )
      {
        counters.resize( counterCount );
        counterDescriptions.resize( counterCount );
        result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
          m_physicalDevice,
          queueFamilyIndex,
          &counterCount,
          reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ),
          reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
    VULKAN_HPP_ASSERT( counterCount <= counters.size() );
    if ( counterCount < counters.size() )
    {
      counters.resize( counterCount );
      counterDescriptions.resize( counterCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
  11059. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_performance_query, raw-pointer overload: queries how many command-buffer submission
  // passes are required to record the given performance query pool configuration.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo,
                                                             uint32_t *                                                      pNumPasses,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
  }
  11070. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_performance_query, enhanced overload: returns the required number of submission
  // passes by value (the underlying query cannot fail, hence noexcept).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR(
    const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint32_t numPasses;
    d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
      m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
    return numPasses;
  }
  11081. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_performance_query, raw-pointer overload: acquires the device profiling lock required
  // before recording performance queries; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  }
  11089. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_performance_query, enhanced overload: acquires the device profiling lock; non-success
  // results are reported through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  11099. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_performance_query: releases the profiling lock previously acquired with
  // acquireProfilingLockKHR.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkReleaseProfilingLockKHR( m_device );
  }
  11106. //=== VK_KHR_get_surface_capabilities2 ===
  // VK_KHR_get_surface_capabilities2, raw-pointer overload: queries surface capabilities into
  // the caller-supplied output struct; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR *             pSurfaceCapabilities,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                                              reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                              reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  }
  11118. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_get_surface_capabilities2, enhanced overload: returns SurfaceCapabilities2KHR wrapped
  // in ResultValueType; non-success results go through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities );
  }
  // VK_KHR_get_surface_capabilities2, structure-chain overload: fills a caller-specified
  // StructureChain whose head is SurfaceCapabilities2KHR, so pNext-extension structs (e.g.
  // capabilities extensions) are queried in the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // The chain's SurfaceCapabilities2KHR element is the head struct passed to the C API;
    // its pNext already links the remaining chain elements.
    VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
    VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice,
                                                                    reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                                    reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
  11144. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // VK_KHR_get_surface_capabilities2, raw-pointer overload: enumerates supported surface formats
  // using the two-call idiom (pass null pSurfaceFormats to query the count first).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
                                                                                       uint32_t *                                                  pSurfaceFormatCount,
                                                                                       VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR *                   pSurfaceFormats,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                                         reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
                                                                         pSurfaceFormatCount,
                                                                         reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  }
  11157. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // VK_KHR_get_surface_capabilities2, enhanced overload: returns all supported surface formats
  // as a vector. Uses the two-call enumeration idiom, retrying while the driver reports
  // VK_INCOMPLETE, and shrinks the vector if the final count is smaller than the reservation.
  template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
    PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
    uint32_t                                                                         surfaceFormatCount;
    VkResult                                                                         result;
    do
    {
      // First call: query the number of formats only.
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
        m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
      {
        surfaceFormats.resize( surfaceFormatCount );
        // Second call: fill the array; may yield VK_INCOMPLETE if the set changed in between.
        result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                          reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                          &surfaceFormatCount,
                                                          reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
    VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
    if ( surfaceFormatCount < surfaceFormats.size() )
    {
      surfaceFormats.resize( surfaceFormatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
  }
// Enhanced-mode overload taking a caller-supplied allocator for the result vector.
// Same two-call enumeration with VK_INCOMPLETE retry as the allocator-less overload.
// B1 is SFINAE-constrained (in the declaration) so this only participates for matching allocators.
template <typename SurfaceFormat2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                         SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // The result vector is constructed with the caller's allocator.
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // Count query (formats pointer nullptr), then fill; retry on VK_INCOMPLETE.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      surfaceFormats.resize( surfaceFormatCount );
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  // Trim any over-allocation from a larger preliminary count.
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    surfaceFormats.resize( surfaceFormatCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats );
}
// StructureChain overload: each returned element is a full pNext chain, so callers can
// query extension structures chained behind SurfaceFormat2KHR.
// A parallel plain SurfaceFormat2KHR vector is passed to the C API; its pNext pointers
// are wired into the caller-visible structure chains before the fill call so the driver
// writes the chained extension structs in place.
template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<StructureChain, StructureChainAllocator> structureChains;
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // Count query; fill is retried on VK_INCOMPLETE like every enumeration wrapper.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      structureChains.resize( surfaceFormatCount );
      surfaceFormats.resize( surfaceFormatCount );
      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
      {
        // Point each scratch element's pNext at the chain storage so the driver
        // fills the chained extension structures directly.
        surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
      }
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    structureChains.resize( surfaceFormatCount );
  }
  // Copy the filled head structures back into their chains (pNext wiring already done).
  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
}
// StructureChain overload with a caller-supplied allocator for the chain vector.
// Identical enumeration/pNext-wiring logic to the allocator-less StructureChain overload.
template <typename StructureChain,
          typename StructureChainAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type
  PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
                                         StructureChainAllocator & structureChainAllocator,
                                         Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // The caller-visible result vector uses the supplied allocator; the scratch
  // SurfaceFormat2KHR vector is an internal default-allocated staging buffer.
  std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
  std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats;
  uint32_t surfaceFormatCount;
  VkResult result;
  do
  {
    // Count query, then fill; retried while the driver reports VK_INCOMPLETE.
    result = d.vkGetPhysicalDeviceSurfaceFormats2KHR(
      m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr );
    if ( ( result == VK_SUCCESS ) && surfaceFormatCount )
    {
      structureChains.resize( surfaceFormatCount );
      surfaceFormats.resize( surfaceFormatCount );
      for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
      {
        // Wire each staging element's pNext into the chain so extension structs get filled.
        surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext;
      }
      result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice,
                                                        reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
                                                        &surfaceFormatCount,
                                                        reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" );
  VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  if ( surfaceFormatCount < surfaceFormats.size() )
  {
    structureChains.resize( surfaceFormatCount );
  }
  // Copy the filled head structures back into their chains.
  for ( uint32_t i = 0; i < surfaceFormatCount; i++ )
  {
    structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i];
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains );
}
  11305. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11306. //=== VK_KHR_get_display_properties2 ===
  11307. template <typename Dispatch>
  11308. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount,
  11309. VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties,
  11310. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11311. {
  11312. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11313. return static_cast<Result>(
  11314. d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) );
  11315. }
  11316. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: enumerates all display properties into a std::vector,
// handling the count/fill two-call idiom and VK_INCOMPLETE retries internally.
template <typename DisplayProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // Count query, then fill; loop retries if the driver reports VK_INCOMPLETE.
    result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result =
        d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if fewer entries were returned than allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Enhanced-mode overload taking a caller-supplied allocator for the result vector.
// Same count/fill enumeration with VK_INCOMPLETE retry as the allocator-less overload.
template <typename DisplayProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Result vector is constructed with the caller's allocator.
  std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result =
        d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11374. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11375. template <typename Dispatch>
  11376. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount,
  11377. VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties,
  11378. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11379. {
  11380. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11381. return static_cast<Result>(
  11382. d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  11383. }
  11384. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: enumerates all display-plane properties into a std::vector,
// handling the count/fill two-call idiom and VK_INCOMPLETE retries internally.
template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // Count query, then fill; loop retries if the driver reports VK_INCOMPLETE.
    result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if fewer entries were returned than allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Enhanced-mode overload taking a caller-supplied allocator for the result vector.
// Same count/fill enumeration with VK_INCOMPLETE retry as the allocator-less overload.
template <typename DisplayPlaneProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Result vector is constructed with the caller's allocator.
  std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11442. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11443. template <typename Dispatch>
  11444. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  11445. uint32_t * pPropertyCount,
  11446. VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR * pProperties,
  11447. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11448. {
  11449. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11450. return static_cast<Result>( d.vkGetDisplayModeProperties2KHR(
  11451. m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( pProperties ) ) );
  11452. }
  11453. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: enumerates all mode properties of the given display into a
// std::vector, handling the count/fill two-call idiom and VK_INCOMPLETE retries.
template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // Count query, then fill; loop retries if the driver reports VK_INCOMPLETE.
    result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  // Trim if fewer entries were returned than allocated.
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Enhanced-mode overload taking a caller-supplied allocator for the result vector.
// Same count/fill enumeration with VK_INCOMPLETE retry as the allocator-less overload.
template <typename DisplayModeProperties2KHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, DisplayModeProperties2KHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type
  PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator,
                                                Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Result vector is constructed with the caller's allocator.
  std::vector<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
  uint32_t propertyCount;
  VkResult result;
  do
  {
    result = d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      result = d.vkGetDisplayModeProperties2KHR(
        m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModeProperties2KHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  11513. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11514. template <typename Dispatch>
  11515. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  11516. PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR * pDisplayPlaneInfo,
  11517. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR * pCapabilities,
  11518. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11519. {
  11520. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11521. return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
  11522. reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ),
  11523. reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  11524. }
  11525. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11526. template <typename Dispatch>
  11527. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type
  11528. PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
  11529. {
  11530. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11531. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
  11532. VkResult result = d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice,
  11533. reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ),
  11534. reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) );
  11535. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
  11536. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities );
  11537. }
  11538. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11539. #if defined( VK_USE_PLATFORM_IOS_MVK )
  11540. //=== VK_MVK_ios_surface ===
  11541. template <typename Dispatch>
  11542. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK * pCreateInfo,
  11543. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  11544. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  11545. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11546. {
  11547. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11548. return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance,
  11549. reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ),
  11550. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  11551. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  11552. }
  11553. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11554. template <typename Dispatch>
  11555. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  11556. Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
  11557. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  11558. Dispatch const & d ) const
  11559. {
  11560. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11561. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  11562. VkResult result =
  11563. d.vkCreateIOSSurfaceMVK( m_instance,
  11564. reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
  11565. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  11566. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  11567. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
  11568. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  11569. }
  11570. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  11571. template <typename Dispatch>
  11572. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  11573. Instance::createIOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK & createInfo,
  11574. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  11575. Dispatch const & d ) const
  11576. {
  11577. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11578. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  11579. VkResult result =
  11580. d.vkCreateIOSSurfaceMVK( m_instance,
  11581. reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ),
  11582. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  11583. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  11584. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique" );
  11585. return createResultValueType(
  11586. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  11587. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  11588. }
  11589. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  11590. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11591. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  11592. #if defined( VK_USE_PLATFORM_MACOS_MVK )
  11593. //=== VK_MVK_macos_surface ===
  11594. template <typename Dispatch>
  11595. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK * pCreateInfo,
  11596. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  11597. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  11598. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11599. {
  11600. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11601. return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance,
  11602. reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ),
  11603. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  11604. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  11605. }
  11606. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11607. template <typename Dispatch>
  11608. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  11609. Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
  11610. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  11611. Dispatch const & d ) const
  11612. {
  11613. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11614. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  11615. VkResult result =
  11616. d.vkCreateMacOSSurfaceMVK( m_instance,
  11617. reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
  11618. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  11619. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  11620. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
  11621. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  11622. }
  11623. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  11624. template <typename Dispatch>
  11625. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  11626. Instance::createMacOSSurfaceMVKUnique( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK & createInfo,
  11627. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  11628. Dispatch const & d ) const
  11629. {
  11630. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11631. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  11632. VkResult result =
  11633. d.vkCreateMacOSSurfaceMVK( m_instance,
  11634. reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ),
  11635. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  11636. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  11637. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique" );
  11638. return createResultValueType(
  11639. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  11640. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  11641. }
  11642. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  11643. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11644. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  11645. //=== VK_EXT_debug_utils ===
  11646. template <typename Dispatch>
  11647. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT * pNameInfo,
  11648. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11649. {
  11650. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11651. return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  11652. }
  11653. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11654. template <typename Dispatch>
  11655. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  11656. Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  11657. {
  11658. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11659. VkResult result = d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) );
  11660. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
  11661. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  11662. }
  11663. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11664. template <typename Dispatch>
  11665. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT * pTagInfo,
  11666. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11667. {
  11668. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11669. return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  11670. }
  11671. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11672. template <typename Dispatch>
  11673. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  11674. Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  11675. {
  11676. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11677. VkResult result = d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) );
  11678. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
  11679. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  11680. }
  11681. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11682. template <typename Dispatch>
  11683. VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
  11684. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11685. {
  11686. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11687. d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  11688. }
  11689. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  11690. template <typename Dispatch>
  11691. VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
  11692. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  11693. {
  11694. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  11695. d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  11696. }
  11697. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Closes the most recently opened debug label region on this queue
// (the counterpart of beginDebugUtilsLabelEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkQueueEndDebugUtilsLabelEXT( m_queue );
}
  // C-style overload: inserts a single (non-region) label into the queue's debug stream.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-parameter form of insertDebugUtilsLabelEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: records a begin-label marker into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-parameter form of the command-buffer begin label.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Records the end of the current debug-utils label region into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  }
  // C-style overload: records a single label marker into this command buffer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT * pLabelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-parameter form of the command-buffer insert label.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT & labelInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: creates a debug-utils messenger, writing the handle through
  // pMessenger and returning the raw Result. pAllocator may be null (default allocator).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks *             pAllocator,
                                            VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT *                pMessenger,
                                            Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance,
                                                                  reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created messenger by value after routing the VkResult
  // through resultCheck. The Optional allocator degrades to a null VkAllocationCallbacks*.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type
    Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                            Dispatch const &                                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    VkResult                                     result = d.vkCreateDebugUtilsMessengerEXT(
      m_instance,
      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), messenger );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle overload: identical call sequence to createDebugUtilsMessengerEXT, but
  // wraps the result in a UniqueHandle whose deleter destroys it via this instance and
  // the same (optional) allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type
    Instance::createDebugUtilsMessengerEXTUnique( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>     allocator,
                                                  Dispatch const &                                              d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
    VkResult                                     result = d.vkCreateDebugUtilsMessengerEXT(
      m_instance,
      reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( messenger, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: destroys a messenger handle; pAllocator may be null.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional allocator variant; converts the Optional into a
  // possibly-null VkAllocationCallbacks pointer before forwarding.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT              messenger,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy() overload for DebugUtilsMessengerEXT handles; identical body to
  // destroyDebugUtilsMessengerEXT (pointer-allocator form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT      messenger,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for DebugUtilsMessengerEXT with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT              messenger,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyDebugUtilsMessengerEXT(
      m_instance,
      static_cast<VkDebugUtilsMessengerEXT>( messenger ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: injects a message into the debug-utils messenger callbacks of this
  // instance (severity/type flags are cast to their C equivalents).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT * pCallbackData,
                                                               Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference-parameter form of submitDebugUtilsMessageEXT.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT       messageSeverity,
                                                               VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT              messageTypes,
                                                               const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT & callbackData,
                                                               Dispatch const &                                                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkSubmitDebugUtilsMessageEXT( m_instance,
                                    static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ),
                                    static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ),
                                    reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  11879. #if defined( VK_USE_PLATFORM_ANDROID_KHR )
  11880. //=== VK_ANDROID_external_memory_android_hardware_buffer ===
  // C-style overload: queries properties of an AHardwareBuffer, writing into *pProperties
  // and returning the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer *                                 buffer,
                                                       VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID * pProperties,
                                                       Dispatch const &                                               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties struct by value; VkResult is routed through
  // resultCheck / createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
    VkResult                                                     result =
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  // StructureChain overload: lets the caller extend the query with chained sN structures;
  // the base AndroidHardwareBufferPropertiesANDROID element of the chain receives the result.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...>                                     structureChain;
    VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties =
      structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
    VkResult result =
      d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: exports device memory as an AHardwareBuffer, written through pBuffer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID * pInfo,
                                                   struct AHardwareBuffer **                                               pBuffer,
                                                   Dispatch const &                                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the exported AHardwareBuffer pointer by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer *>::type
    Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    struct AHardwareBuffer * buffer;
    VkResult                 result =
      d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  11941. #if defined( VK_ENABLE_BETA_EXTENSIONS )
  11942. //=== VK_AMDX_shader_enqueue ===
  // C-style overload: batch-creates execution-graph pipelines into the caller-provided
  // pPipelines array and returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createExecutionGraphPipelinesAMDX( VULKAN_HPP_NAMESPACE::PipelineCache                                 pipelineCache,
                                               uint32_t                                                            createInfoCount,
                                               const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX *  pCreateInfos,
                                               const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                   pAllocator,
                                               VULKAN_HPP_NAMESPACE::Pipeline *                                    pPipelines,
                                               Dispatch const &                                                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateExecutionGraphPipelinesAMDX( m_device,
                                                                       static_cast<VkPipelineCache>( pipelineCache ),
                                                                       createInfoCount,
                                                                       reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( pCreateInfos ),
                                                                       reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                       reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: creates one pipeline per element of createInfos and returns them in
  // a vector. Note ePipelineCompileRequiredEXT is accepted as a success code, hence the
  // ResultValue (result + value) return instead of a throwing ResultValueType.
  template <typename PipelineAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
    VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
    Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
    VkResult                                                       result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Allocator-aware variant: same as above, but the result vector is constructed with the
  // caller-supplied PipelineAllocator instance (B0 is SFINAE-constrained to that allocator).
  template <typename PipelineAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createExecutionGraphPipelinesAMDX(
    VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
    PipelineAllocator &                                                                        pipelineAllocator,
    Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
    VkResult                                                       result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDX",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  }
  // Single-pipeline convenience: calls the batch entry point with createInfoCount == 1 and
  // returns the one created pipeline (ePipelineCompileRequiredEXT allowed as success).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createExecutionGraphPipelineAMDX( VULKAN_HPP_NAMESPACE::PipelineCache                                 pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX &  createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>           allocator,
                                              Dispatch const &                                                    d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult                       result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDX",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle batch variant: creates the pipelines into a plain vector, then wraps each
  // in a UniqueHandle sharing one ObjectDestroy deleter (this device + allocator + dispatcher).
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createExecutionGraphPipelinesAMDXUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
      Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult                                    result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Allocator-aware smart-handle variant: identical to the previous overload except that the
  // vector of UniqueHandles is constructed with the caller-supplied PipelineAllocator.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createExecutionGraphPipelinesAMDXUnique(
      VULKAN_HPP_NAMESPACE::PipelineCache                                                        pipelineCache,
      VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX> const & createInfos,
      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>                                  allocator,
      PipelineAllocator &                                                                        pipelineAllocator,
      Dispatch const &                                                                           d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult                                    result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelinesAMDXUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  // Smart-handle single-pipeline convenience: createInfoCount == 1, result wrapped in a
  // UniqueHandle with an ObjectDestroy deleter bound to this device and the allocator.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createExecutionGraphPipelineAMDXUnique( VULKAN_HPP_NAMESPACE::PipelineCache                                 pipelineCache,
                                                    const VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX &  createInfo,
                                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>           allocator,
                                                    Dispatch const &                                                    d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult                       result = d.vkCreateExecutionGraphPipelinesAMDX(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkExecutionGraphPipelineCreateInfoAMDX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createExecutionGraphPipelineAMDXUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: queries the scratch-memory size info for an execution graph pipeline.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline                                executionGraph,
                                                      VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX * pSizeInfo,
                                                      Dispatch const &                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetExecutionGraphPipelineScratchSizeAMDX(
      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( pSizeInfo ) ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the scratch-size struct by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::type
    Device::getExecutionGraphPipelineScratchSizeAMDX( VULKAN_HPP_NAMESPACE::Pipeline executionGraph, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX sizeInfo;
    VkResult                                                    result = d.vkGetExecutionGraphPipelineScratchSizeAMDX(
      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<VkExecutionGraphPipelineScratchSizeAMDX *>( &sizeInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineScratchSizeAMDX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sizeInfo );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style overload: looks up the node index for a shader-stage node of an execution
  // graph pipeline, written through pNodeIndex.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getExecutionGraphPipelineNodeIndexAMDX( VULKAN_HPP_NAMESPACE::Pipeline                                       executionGraph,
                                                    const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX *  pNodeInfo,
                                                    uint32_t *                                                           pNodeIndex,
                                                    Dispatch const &                                                     d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetExecutionGraphPipelineNodeIndexAMDX(
      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( pNodeInfo ), pNodeIndex ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the node index by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type Device::getExecutionGraphPipelineNodeIndexAMDX(
    VULKAN_HPP_NAMESPACE::Pipeline executionGraph, const VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX & nodeInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint32_t nodeIndex;
    VkResult result = d.vkGetExecutionGraphPipelineNodeIndexAMDX(
      m_device, static_cast<VkPipeline>( executionGraph ), reinterpret_cast<const VkPipelineShaderStageNodeCreateInfoAMDX *>( &nodeInfo ), &nodeIndex );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getExecutionGraphPipelineNodeIndexAMDX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), nodeIndex );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Records initialization of the execution-graph scratch memory at the given device address.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::initializeGraphScratchMemoryAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
                                                                          Dispatch const &                    d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdInitializeGraphScratchMemoryAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ) );
  }
  12172. template <typename Dispatch>
  12173. VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
  12174. const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
  12175. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12176. {
  12177. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12178. d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
  12179. }
  12180. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: takes the count info by reference instead of by pointer;
  // otherwise identical to the pointer overload above.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
                                                           const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchGraphAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
  }
  12189. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: forwards the caller-owned count-info pointer straight to
  // vkCmdDispatchGraphIndirectAMDX.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
                                                                   const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX * pCountInfo,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchGraphIndirectAMDX(
      m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( pCountInfo ) );
  }
# ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter instead of pointer; same call otherwise.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
                                                                   const VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX & countInfo,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDispatchGraphIndirectAMDX(
      m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), reinterpret_cast<const VkDispatchGraphCountInfoAMDX *>( &countInfo ) );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12210. template <typename Dispatch>
  12211. VULKAN_HPP_INLINE void CommandBuffer::dispatchGraphIndirectCountAMDX( VULKAN_HPP_NAMESPACE::DeviceAddress scratch,
  12212. VULKAN_HPP_NAMESPACE::DeviceAddress countInfo,
  12213. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12214. {
  12215. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12216. d.vkCmdDispatchGraphIndirectCountAMDX( m_commandBuffer, static_cast<VkDeviceAddress>( scratch ), static_cast<VkDeviceAddress>( countInfo ) );
  12217. }
  12218. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  12219. //=== VK_EXT_sample_locations ===
  // Raw overload: forwards the caller-owned sample-locations pointer to the C entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT * pSampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: reference parameter instead of pointer; same call otherwise.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT & sampleLocationsInfo,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: writes the queried multisample properties through the caller's out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
                                                                      VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT * pMultisampleProperties,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the properties by value (the query itself cannot fail,
  // so there is no Result to check).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT
    PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT(
      m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
    return multisampleProperties;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12257. //=== VK_KHR_get_memory_requirements2 ===
  // Raw overload: fills the caller's MemoryRequirements2 out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo,
                                                                 VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetImageMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  // StructureChain overload: the MemoryRequirements2 element of the caller-specified
  // chain is filled in place, so extension structs linked via pNext get populated too.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetImageMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: fills the caller's MemoryRequirements2 out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo,
                                                                  VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetBufferMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the requirements by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
    Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
    d.vkGetBufferMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  // StructureChain overload: fills the MemoryRequirements2 element of the chain in place,
  // so pNext-linked extension structs get populated as well.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
    d.vkGetBufferMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
    return structureChain;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: standard Vulkan two-call enumeration surface — the caller drives the
  // count/fill protocol through pSparseMemoryRequirementCount / pSparseMemoryRequirements.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo,
                                                                       uint32_t * pSparseMemoryRequirementCount,
                                                                       VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ),
                                              pSparseMemoryRequirementCount,
                                              reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: performs the two-call enumeration internally and returns a vector.
  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
    uint32_t sparseMemoryRequirementCount;
    // First call with a null array pointer: query the element count only.
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                              &sparseMemoryRequirementCount,
                                              reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    // Shrink if the second call reported fewer elements than the first.
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
  // Allocator overload: identical enumeration, but the result vector is constructed
  // with the caller-supplied allocator instance.
  template <typename SparseImageMemoryRequirements2Allocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
    Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info,
                                                  SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
      sparseImageMemoryRequirements2Allocator );
    uint32_t sparseMemoryRequirementCount;
    // First call: query the element count only.
    d.vkGetImageSparseMemoryRequirements2KHR(
      m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    // Second call: fill the sized vector.
    d.vkGetImageSparseMemoryRequirements2KHR( m_device,
                                              reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ),
                                              &sparseMemoryRequirementCount,
                                              reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
    {
      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
    }
    return sparseMemoryRequirements;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12384. //=== VK_KHR_acceleration_structure ===
  // Raw overload: writes the new handle through pAccelerationStructure and returns the
  // VkResult unchanged; no allocation callbacks conversion beyond the reinterpret_cast.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR * pCreateInfo,
                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                            VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructure,
                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device,
                                                                    reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ),
                                                                    reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                    reinterpret_cast<VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: returns the created handle; errors are routed through resultCheck.
  // The Optional<AllocationCallbacks> converts to a plain pointer (presumably nullptr when
  // no allocator was passed — defined with the Optional wrapper elsewhere in the library).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type
    Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                            Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    VkResult result = d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
  }
# ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle overload: same creation path, but the returned UniqueHandle destroys the
  // acceleration structure via ObjectDestroy when it goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type
    Device::createAccelerationStructureKHRUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR & createInfo,
                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
    VkResult result = d.vkCreateAccelerationStructureKHR(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
# endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: destroys the acceleration structure with caller-owned allocation callbacks.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                                                  const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR(
      m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: Optional<> allocator instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy() overload: same body as destroyAccelerationStructureKHR above;
  // provided so Device::destroy works uniformly across handle types.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR(
      m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() enhanced overload with Optional<> allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureKHR(
      m_device,
      static_cast<VkAccelerationStructureKHR>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: caller supplies parallel arrays (pInfos[i] pairs with ppBuildRangeInfos[i]).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount,
                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
                                                   const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
                                           infoCount,
                                           reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
                                           reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: ArrayProxy spans; validates that both proxies have the same length
  // (assert under VULKAN_HPP_NO_EXCEPTIONS, LogicError otherwise) before forwarding.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
# else
    if ( infos.size() != pBuildRangeInfos.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer,
                                           infos.size(),
                                           reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
                                           reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw overload: four parallel arrays of length infoCount, forwarded unchanged.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount,
                                                                                const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
                                                                                const VULKAN_HPP_NAMESPACE::DeviceAddress * pIndirectDeviceAddresses,
                                                                                const uint32_t * pIndirectStrides,
                                                                                const uint32_t * const * ppMaxPrimitiveCounts,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
                                                   infoCount,
                                                   reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
                                                   reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ),
                                                   pIndirectStrides,
                                                   ppMaxPrimitiveCounts );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: all four ArrayProxy spans must have equal length; each mismatch is
  // checked separately (assert under VULKAN_HPP_NO_EXCEPTIONS, LogicError otherwise).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & indirectStrides,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t * const> const & pMaxPrimitiveCounts,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
    VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
    VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
# else
    if ( infos.size() != indirectDeviceAddresses.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
    }
    if ( infos.size() != indirectStrides.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
    }
    if ( infos.size() != pMaxPrimitiveCounts.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
    }
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
    d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer,
                                                   infos.size(),
                                                   reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
                                                   reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ),
                                                   indirectStrides.data(),
                                                   pMaxPrimitiveCounts.data() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12567. template <typename Dispatch>
  12568. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12569. Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12570. uint32_t infoCount,
  12571. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pInfos,
  12572. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos,
  12573. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12574. {
  12575. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12576. return static_cast<Result>(
  12577. d.vkBuildAccelerationStructuresKHR( m_device,
  12578. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12579. infoCount,
  12580. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ),
  12581. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  12582. }
  12583. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12584. template <typename Dispatch>
  12585. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::buildAccelerationStructuresKHR(
  12586. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12587. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos,
  12588. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR * const> const & pBuildRangeInfos,
  12589. Dispatch const & d ) const
  12590. {
  12591. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12592. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12593. VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
  12594. # else
  12595. if ( infos.size() != pBuildRangeInfos.size() )
  12596. {
  12597. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
  12598. }
  12599. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12600. VkResult result =
  12601. d.vkBuildAccelerationStructuresKHR( m_device,
  12602. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12603. infos.size(),
  12604. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ),
  12605. reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  12606. resultCheck(
  12607. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12608. VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR",
  12609. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12610. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12611. }
  12612. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12613. template <typename Dispatch>
  12614. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12615. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
  12616. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12617. {
  12618. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12619. return static_cast<Result>( d.vkCopyAccelerationStructureKHR(
  12620. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  12621. }
  12622. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12623. template <typename Dispatch>
  12624. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12625. Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12626. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
  12627. Dispatch const & d ) const
  12628. {
  12629. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12630. VkResult result = d.vkCopyAccelerationStructureKHR(
  12631. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  12632. resultCheck(
  12633. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12634. VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR",
  12635. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12636. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12637. }
  12638. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12639. template <typename Dispatch>
  12640. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12641. Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12642. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
  12643. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12644. {
  12645. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12646. return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR(
  12647. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  12648. }
  12649. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12650. template <typename Dispatch>
  12651. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12652. Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12653. const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
  12654. Dispatch const & d ) const
  12655. {
  12656. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12657. VkResult result = d.vkCopyAccelerationStructureToMemoryKHR(
  12658. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  12659. resultCheck(
  12660. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12661. VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR",
  12662. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12663. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12664. }
  12665. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12666. template <typename Dispatch>
  12667. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12668. Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12669. const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
  12670. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12671. {
  12672. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12673. return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR(
  12674. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  12675. }
  12676. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12677. template <typename Dispatch>
  12678. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  12679. Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12680. const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
  12681. Dispatch const & d ) const
  12682. {
  12683. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12684. VkResult result = d.vkCopyMemoryToAccelerationStructureKHR(
  12685. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  12686. resultCheck(
  12687. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  12688. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR",
  12689. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  12690. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  12691. }
  12692. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12693. template <typename Dispatch>
  12694. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12695. Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
  12696. const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
  12697. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12698. size_t dataSize,
  12699. void * pData,
  12700. size_t stride,
  12701. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12702. {
  12703. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12704. return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12705. accelerationStructureCount,
  12706. reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
  12707. static_cast<VkQueryType>( queryType ),
  12708. dataSize,
  12709. pData,
  12710. stride ) );
  12711. }
  12712. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12713. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  12714. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
  12715. Device::writeAccelerationStructuresPropertiesKHR(
  12716. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12717. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12718. size_t dataSize,
  12719. size_t stride,
  12720. Dispatch const & d ) const
  12721. {
  12722. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12723. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  12724. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  12725. VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12726. accelerationStructures.size(),
  12727. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12728. static_cast<VkQueryType>( queryType ),
  12729. data.size() * sizeof( DataType ),
  12730. reinterpret_cast<void *>( data.data() ),
  12731. stride );
  12732. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  12733. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  12734. }
  12735. template <typename DataType, typename Dispatch>
  12736. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::writeAccelerationStructuresPropertyKHR(
  12737. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12738. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12739. size_t stride,
  12740. Dispatch const & d ) const
  12741. {
  12742. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12743. DataType data;
  12744. VkResult result = d.vkWriteAccelerationStructuresPropertiesKHR( m_device,
  12745. accelerationStructures.size(),
  12746. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12747. static_cast<VkQueryType>( queryType ),
  12748. sizeof( DataType ),
  12749. reinterpret_cast<void *>( &data ),
  12750. stride );
  12751. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
  12752. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  12753. }
  12754. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12755. template <typename Dispatch>
  12756. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR * pInfo,
  12757. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12758. {
  12759. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12760. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  12761. }
  12762. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12763. template <typename Dispatch>
  12764. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR & info,
  12765. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12766. {
  12767. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12768. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  12769. }
  12770. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12771. template <typename Dispatch>
  12772. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR * pInfo,
  12773. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12774. {
  12775. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12776. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  12777. }
  12778. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12779. template <typename Dispatch>
  12780. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR & info,
  12781. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12782. {
  12783. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12784. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  12785. }
  12786. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12787. template <typename Dispatch>
  12788. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR * pInfo,
  12789. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12790. {
  12791. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12792. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  12793. }
  12794. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12795. template <typename Dispatch>
  12796. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR & info,
  12797. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12798. {
  12799. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12800. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  12801. }
  12802. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12803. template <typename Dispatch>
  12804. VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR * pInfo,
  12805. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12806. {
  12807. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12808. return static_cast<DeviceAddress>(
  12809. d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  12810. }
  12811. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12812. template <typename Dispatch>
  12813. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
  12814. Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR & info,
  12815. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12816. {
  12817. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12818. VkDeviceAddress result =
  12819. d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
  12820. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  12821. }
  12822. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12823. template <typename Dispatch>
  12824. VULKAN_HPP_INLINE void
  12825. CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount,
  12826. const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR * pAccelerationStructures,
  12827. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12828. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  12829. uint32_t firstQuery,
  12830. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12831. {
  12832. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12833. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
  12834. accelerationStructureCount,
  12835. reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ),
  12836. static_cast<VkQueryType>( queryType ),
  12837. static_cast<VkQueryPool>( queryPool ),
  12838. firstQuery );
  12839. }
  12840. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12841. template <typename Dispatch>
  12842. VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR(
  12843. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures,
  12844. VULKAN_HPP_NAMESPACE::QueryType queryType,
  12845. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  12846. uint32_t firstQuery,
  12847. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12848. {
  12849. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12850. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer,
  12851. accelerationStructures.size(),
  12852. reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ),
  12853. static_cast<VkQueryType>( queryType ),
  12854. static_cast<VkQueryPool>( queryPool ),
  12855. firstQuery );
  12856. }
  12857. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12858. template <typename Dispatch>
  12859. VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR * pVersionInfo,
  12860. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
  12861. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12862. {
  12863. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12864. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
  12865. reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ),
  12866. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  12867. }
  12868. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12869. template <typename Dispatch>
  12870. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
  12871. Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR & versionInfo,
  12872. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12873. {
  12874. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12875. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
  12876. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device,
  12877. reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ),
  12878. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
  12879. return compatibility;
  12880. }
  12881. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12882. template <typename Dispatch>
  12883. VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  12884. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR * pBuildInfo,
  12885. const uint32_t * pMaxPrimitiveCounts,
  12886. VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR * pSizeInfo,
  12887. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12888. {
  12889. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12890. d.vkGetAccelerationStructureBuildSizesKHR( m_device,
  12891. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  12892. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ),
  12893. pMaxPrimitiveCounts,
  12894. reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  12895. }
  12896. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12897. template <typename Dispatch>
  12898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR
  12899. Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  12900. const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR & buildInfo,
  12901. VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & maxPrimitiveCounts,
  12902. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  12903. {
  12904. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12905. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12906. VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
  12907. # else
  12908. if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
  12909. {
  12910. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
  12911. }
  12912. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  12913. VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
  12914. d.vkGetAccelerationStructureBuildSizesKHR( m_device,
  12915. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  12916. reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ),
  12917. maxPrimitiveCounts.data(),
  12918. reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
  12919. return sizeInfo;
  12920. }
  12921. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12922. //=== VK_KHR_ray_tracing_pipeline ===
  12923. template <typename Dispatch>
  12924. VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
  12925. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
  12926. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
  12927. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
  12928. uint32_t width,
  12929. uint32_t height,
  12930. uint32_t depth,
  12931. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12932. {
  12933. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12934. d.vkCmdTraceRaysKHR( m_commandBuffer,
  12935. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
  12936. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
  12937. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
  12938. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
  12939. width,
  12940. height,
  12941. depth );
  12942. }
  12943. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12944. template <typename Dispatch>
  12945. VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
  12946. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
  12947. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
  12948. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
  12949. uint32_t width,
  12950. uint32_t height,
  12951. uint32_t depth,
  12952. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12953. {
  12954. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12955. d.vkCmdTraceRaysKHR( m_commandBuffer,
  12956. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
  12957. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
  12958. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
  12959. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
  12960. width,
  12961. height,
  12962. depth );
  12963. }
  12964. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  12965. template <typename Dispatch>
  12966. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  12967. Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12968. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  12969. uint32_t createInfoCount,
  12970. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR * pCreateInfos,
  12971. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  12972. VULKAN_HPP_NAMESPACE::Pipeline * pPipelines,
  12973. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  12974. {
  12975. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12976. return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device,
  12977. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12978. static_cast<VkPipelineCache>( pipelineCache ),
  12979. createInfoCount,
  12980. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ),
  12981. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  12982. reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  12983. }
  12984. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  12985. template <typename PipelineAllocator, typename Dispatch>
  12986. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  12987. Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  12988. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  12989. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  12990. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  12991. Dispatch const & d ) const
  12992. {
  12993. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  12994. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  12995. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  12996. m_device,
  12997. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  12998. static_cast<VkPipelineCache>( pipelineCache ),
  12999. createInfos.size(),
  13000. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
  13001. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13002. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13003. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13004. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
  13005. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13006. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13007. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13008. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13009. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13010. }
  13011. template <typename PipelineAllocator,
  13012. typename Dispatch,
  13013. typename B0,
  13014. typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  13015. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  13016. Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  13017. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13018. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  13019. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13020. PipelineAllocator & pipelineAllocator,
  13021. Dispatch const & d ) const
  13022. {
  13023. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13024. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  13025. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  13026. m_device,
  13027. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  13028. static_cast<VkPipelineCache>( pipelineCache ),
  13029. createInfos.size(),
  13030. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
  13031. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13032. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13033. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13034. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR",
  13035. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13036. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13037. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13038. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13039. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13040. }
  13041. template <typename Dispatch>
  13042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
  13043. Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  13044. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13045. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
  13046. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13047. Dispatch const & d ) const
  13048. {
  13049. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13050. VULKAN_HPP_NAMESPACE::Pipeline pipeline;
  13051. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  13052. m_device,
  13053. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  13054. static_cast<VkPipelineCache>( pipelineCache ),
  13055. 1,
  13056. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
  13057. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13058. reinterpret_cast<VkPipeline *>( &pipeline ) );
  13059. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13060. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR",
  13061. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13062. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13063. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13064. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13065. return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  13066. }
  13067. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  13068. template <typename Dispatch, typename PipelineAllocator>
  13069. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  13070. Device::createRayTracingPipelinesKHRUnique(
  13071. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  13072. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13073. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  13074. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13075. Dispatch const & d ) const
  13076. {
  13077. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13078. std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  13079. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  13080. m_device,
  13081. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  13082. static_cast<VkPipelineCache>( pipelineCache ),
  13083. createInfos.size(),
  13084. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
  13085. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13086. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13087. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13088. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
  13089. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13090. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13091. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13092. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13093. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
  13094. uniquePipelines.reserve( createInfos.size() );
  13095. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  13096. for ( auto const & pipeline : pipelines )
  13097. {
  13098. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  13099. }
  13100. return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
  13101. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  13102. }
  13103. template <typename Dispatch,
  13104. typename PipelineAllocator,
  13105. typename B0,
  13106. typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  13107. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
  13108. Device::createRayTracingPipelinesKHRUnique(
  13109. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  13110. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13111. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos,
  13112. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13113. PipelineAllocator & pipelineAllocator,
  13114. Dispatch const & d ) const
  13115. {
  13116. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13117. std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
  13118. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  13119. m_device,
  13120. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  13121. static_cast<VkPipelineCache>( pipelineCache ),
  13122. createInfos.size(),
  13123. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ),
  13124. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13125. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13126. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13127. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique",
  13128. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13129. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13130. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13131. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13132. std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
  13133. uniquePipelines.reserve( createInfos.size() );
  13134. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  13135. for ( auto const & pipeline : pipelines )
  13136. {
  13137. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
  13138. }
  13139. return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
  13140. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  13141. }
  // Creates a single ray-tracing pipeline (count fixed at 1) and wraps it in a UniqueHandle whose
  // ObjectDestroy deleter frees it via this device and the given allocator.
  // Returns ResultValue (not ResultValueType): eOperationDeferredKHR, eOperationNotDeferredKHR and
  // ePipelineCompileRequiredEXT are accepted as non-error results the caller may need to inspect.
  13142. template <typename Dispatch>
  13143. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
  13144. Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  13145. VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13146. const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR & createInfo,
  13147. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13148. Dispatch const & d ) const
  13149. {
  // Guard against a dispatcher initialized for a different Vulkan header version.
  13150. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13151. VULKAN_HPP_NAMESPACE::Pipeline pipeline;
  13152. VkResult result = d.vkCreateRayTracingPipelinesKHR(
  13153. m_device,
  13154. static_cast<VkDeferredOperationKHR>( deferredOperation ),
  13155. static_cast<VkPipelineCache>( pipelineCache ),
  13156. 1,
  13157. reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ),
  13158. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13159. reinterpret_cast<VkPipeline *>( &pipeline ) );
  // resultCheck throws (or reports) unless result is one of the listed success codes.
  13160. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13161. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique",
  13162. { VULKAN_HPP_NAMESPACE::Result::eSuccess,
  13163. VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR,
  13164. VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR,
  13165. VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13166. return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
  13167. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13168. UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  13169. }
  13170. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  13171. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw (non-enhanced) wrapper: copies shader-group handles for [firstGroup, firstGroup+groupCount)
  // into caller-provided storage pData of dataSize bytes; caller checks the returned Result.
  13172. template <typename Dispatch>
  13173. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  13174. uint32_t firstGroup,
  13175. uint32_t groupCount,
  13176. size_t dataSize,
  13177. void * pData,
  13178. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13179. {
  13180. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13181. return static_cast<Result>(
  13182. d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  13183. }
  13184. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the shader-group handles as a vector of DataType elements.
  // dataSize must be an exact multiple of sizeof(DataType) (asserted below); throws on failure
  // via resultCheck/createResultValueType in exception-enabled builds.
  13185. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  13186. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesKHR(
  13187. VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  13188. {
  13189. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13190. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  13191. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  13192. VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
  13193. m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
  13194. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
  13195. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13196. }
  // Enhanced single-value variant: fetches exactly sizeof(DataType) bytes of handle data into one
  // DataType object. groupCount is forwarded to the driver unchanged; the buffer holds one element.
  13197. template <typename DataType, typename Dispatch>
  13198. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  13199. Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  13200. {
  13201. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13202. DataType data;
  13203. VkResult result = d.vkGetRayTracingShaderGroupHandlesKHR(
  13204. m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
  13205. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
  13206. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13207. }
  13208. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper for the capture/replay variant: writes opaque capture-replay handle data for the
  // requested group range into caller-provided pData; caller checks the returned Result.
  13209. template <typename Dispatch>
  13210. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  13211. uint32_t firstGroup,
  13212. uint32_t groupCount,
  13213. size_t dataSize,
  13214. void * pData,
  13215. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13216. {
  13217. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13218. return static_cast<Result>(
  13219. d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  13220. }
  13221. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns capture/replay shader-group handle data as a vector of DataType.
  // dataSize must be an exact multiple of sizeof(DataType) (asserted below).
  13222. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  13223. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
  13224. Device::getRayTracingCaptureReplayShaderGroupHandlesKHR(
  13225. VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  13226. {
  13227. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13228. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  13229. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  13230. VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
  13231. m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
  13232. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
  13233. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13234. }
  // Enhanced single-value variant: fetches sizeof(DataType) bytes of capture/replay handle data
  // into one DataType object; groupCount is forwarded to the driver unchanged.
  13235. template <typename DataType, typename Dispatch>
  13236. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR(
  13237. VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  13238. {
  13239. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13240. DataType data;
  13241. VkResult result = d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
  13242. m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
  13243. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
  13244. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13245. }
  13246. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper: records an indirect ray-trace command; binding-table regions are passed as raw
  // pointers and the trace dimensions are read at execution time from indirectDeviceAddress.
  13247. template <typename Dispatch>
  13248. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pRaygenShaderBindingTable,
  13249. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pMissShaderBindingTable,
  13250. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pHitShaderBindingTable,
  13251. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR * pCallableShaderBindingTable,
  13252. VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
  13253. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13254. {
  13255. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // The C++ wrapper structs are layout-compatible with the C structs, so reinterpret_cast is safe here.
  13256. d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
  13257. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ),
  13258. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ),
  13259. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ),
  13260. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ),
  13261. static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  13262. }
  13263. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: identical to the pointer form but takes the four binding-table regions by
  // const reference, so callers cannot pass null region descriptions.
  13264. template <typename Dispatch>
  13265. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & raygenShaderBindingTable,
  13266. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & missShaderBindingTable,
  13267. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & hitShaderBindingTable,
  13268. const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR & callableShaderBindingTable,
  13269. VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
  13270. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13271. {
  13272. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13273. d.vkCmdTraceRaysIndirectKHR( m_commandBuffer,
  13274. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ),
  13275. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ),
  13276. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ),
  13277. reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ),
  13278. static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  13279. }
  13280. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Queries the stack size (in bytes, as DeviceSize) of one shader within a shader group of the
  // given ray-tracing pipeline; which shader of the group is selected by groupShader.
  13281. template <typename Dispatch>
  13282. VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
  13283. uint32_t group,
  13284. VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader,
  13285. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13286. {
  13287. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13288. return static_cast<DeviceSize>(
  13289. d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  13290. }
  // Records a command setting the dynamic stack size for subsequent ray-tracing pipeline launches.
  13291. template <typename Dispatch>
  13292. VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13293. {
  13294. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13295. d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  13296. }
  13297. //=== VK_KHR_sampler_ycbcr_conversion ===
  // Raw wrapper: creates a sampler Y'CbCr conversion object into *pYcbcrConversion; the caller
  // checks the returned Result. pAllocator may be null for default allocation.
  13299. template <typename Dispatch>
  13300. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  13301. Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo,
  13302. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13303. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion,
  13304. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13305. {
  13306. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13307. return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device,
  13308. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ),
  13309. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  13310. reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  13311. }
  13311. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the created SamplerYcbcrConversion by value; failures are reported
  // through resultCheck/createResultValueType (throwing in exception-enabled builds).
  13313. template <typename Dispatch>
  13314. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type
  13315. Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  13316. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13317. Dispatch const & d ) const
  13318. {
  13319. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13320. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  13321. VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
  13322. m_device,
  13323. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  // Optional<> converts to a possibly-null AllocationCallbacks pointer.
  13324. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13325. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  13326. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
  13327. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
  13328. }
  13328. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same creation path as createSamplerYcbcrConversionKHR, but wraps the
  // result in a UniqueHandle whose ObjectDestroy deleter destroys it with this device/allocator.
  13330. template <typename Dispatch>
  13331. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type
  13332. Device::createSamplerYcbcrConversionKHRUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo,
  13333. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13334. Dispatch const & d ) const
  13335. {
  13336. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13337. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  13338. VkResult result = d.vkCreateSamplerYcbcrConversionKHR(
  13339. m_device,
  13340. reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ),
  13341. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13342. reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
  13343. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique" );
  13344. return createResultValueType(
  13345. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13346. UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  13347. }
  13347. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  13348. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper: destroys a sampler Y'CbCr conversion; pAllocator must match the one used at creation
  // (or be null if default allocation was used).
  13350. template <typename Dispatch>
  13351. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  13352. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13353. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13354. {
  13355. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13356. d.vkDestroySamplerYcbcrConversionKHR(
  13357. m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  13358. }
  13358. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same destruction, with Optional<> converting to a possibly-null
  // AllocationCallbacks pointer.
  13359. template <typename Dispatch>
  13360. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion,
  13361. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13362. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13363. {
  13364. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13365. d.vkDestroySamplerYcbcrConversionKHR(
  13366. m_device,
  13367. static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ),
  13368. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13369. }
  13370. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  13371. //=== VK_KHR_bind_memory2 ===
  // Raw wrapper: binds memory to multiple buffers in one call from an array of BindBufferMemoryInfo;
  // the caller checks the returned Result.
  13372. template <typename Dispatch>
  13373. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount,
  13374. const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos,
  13375. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13376. {
  13377. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13378. return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  13379. }
  13380. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are taken from the ArrayProxy; failures are reported
  // through resultCheck (throwing in exception-enabled builds), hence the _WHEN_NO_EXCEPTIONS macro.
  13381. template <typename Dispatch>
  13382. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13383. Device::bindBufferMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos,
  13384. Dispatch const & d ) const
  13385. {
  13386. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13387. VkResult result = d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
  13388. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
  13389. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13390. }
  13391. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper: binds memory to multiple images in one call from an array of BindImageMemoryInfo;
  // the caller checks the returned Result.
  13392. template <typename Dispatch>
  13393. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount,
  13394. const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos,
  13395. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13396. {
  13397. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13398. return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  13399. }
  13400. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: count and pointer are taken from the ArrayProxy; mirrors
  // bindBufferMemory2KHR above.
  13401. template <typename Dispatch>
  13402. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13403. Device::bindImageMemory2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  13404. {
  13405. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13406. VkResult result = d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
  13407. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
  13408. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13409. }
  13410. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  13411. //=== VK_EXT_image_drm_format_modifier ===
  // Raw wrapper: queries DRM format-modifier properties of an image into *pProperties; the caller
  // checks the returned Result.
  13413. template <typename Dispatch>
  13414. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT(
  13415. VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13416. {
  13417. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13418. return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT(
  13419. m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  13420. }
  13420. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the queried ImageDrmFormatModifierPropertiesEXT by value; failures
  // are reported through resultCheck/createResultValueType.
  13422. template <typename Dispatch>
  13423. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type
  13424. Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  13425. {
  13426. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13427. VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
  13428. VkResult result = d.vkGetImageDrmFormatModifierPropertiesEXT(
  13429. m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) );
  13430. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
  13431. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  13432. }
  13432. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  13433. //=== VK_EXT_validation_cache ===
  // Raw wrapper: creates a validation cache into *pValidationCache; the caller checks the returned
  // Result. pAllocator may be null for default allocation.
  13435. template <typename Dispatch>
  13436. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT * pCreateInfo,
  13437. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13438. VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pValidationCache,
  13439. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13440. {
  13441. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13442. return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device,
  13443. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ),
  13444. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  13445. reinterpret_cast<VkValidationCacheEXT *>( pValidationCache ) ) );
  13446. }
  13446. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the created ValidationCacheEXT by value; failures are reported
  // through resultCheck/createResultValueType (throwing in exception-enabled builds).
  13447. template <typename Dispatch>
  13448. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type
  13449. Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
  13450. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13451. Dispatch const & d ) const
  13452. {
  13453. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13454. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  13455. VkResult result = d.vkCreateValidationCacheEXT(
  13456. m_device,
  13457. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
  // Optional<> converts to a possibly-null AllocationCallbacks pointer.
  13458. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13459. reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
  13460. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
  13461. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), validationCache );
  13462. }
  13463. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: same creation path as createValidationCacheEXT, but wraps the result in a
  // UniqueHandle whose ObjectDestroy deleter destroys it with this device/allocator.
  13465. template <typename Dispatch>
  13466. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type
  13467. Device::createValidationCacheEXTUnique( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT & createInfo,
  13468. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13469. Dispatch const & d ) const
  13470. {
  13471. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13472. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  13473. VkResult result = d.vkCreateValidationCacheEXT(
  13474. m_device,
  13475. reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ),
  13476. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13477. reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) );
  13478. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique" );
  13479. return createResultValueType(
  13480. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13481. UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( validationCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  13482. }
  13482. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  13483. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper: destroys a validation cache; pAllocator must match the one used at creation
  // (or be null if default allocation was used).
  13484. template <typename Dispatch>
  13485. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  13486. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13487. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13488. {
  13489. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13490. d.vkDestroyValidationCacheEXT(
  13491. m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  13492. }
  13493. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: same destruction, with Optional<> converting to a possibly-null
  // AllocationCallbacks pointer.
  13495. template <typename Dispatch>
  13496. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  13497. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13498. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13499. {
  13500. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13501. d.vkDestroyValidationCacheEXT(
  13502. m_device,
  13503. static_cast<VkValidationCacheEXT>( validationCache ),
  13504. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13505. }
  13505. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy() overload for ValidationCacheEXT; forwards to vkDestroyValidationCacheEXT
  // exactly like destroyValidationCacheEXT above (used by UniqueHandle deleters).
  13507. template <typename Dispatch>
  13508. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  13509. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  13510. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13511. {
  13512. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13513. d.vkDestroyValidationCacheEXT(
  13514. m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  13515. }
  13515. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Generic destroy() overload for ValidationCacheEXT taking Optional allocation callbacks.
  13517. template <typename Dispatch>
  13518. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  13519. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13520. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13521. {
  13522. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13523. d.vkDestroyValidationCacheEXT(
  13524. m_device,
  13525. static_cast<VkValidationCacheEXT>( validationCache ),
  13526. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  13527. }
  13527. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper: merges srcCacheCount source validation caches into dstCache; the caller checks
  // the returned Result.
  13529. template <typename Dispatch>
  13530. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
  13531. uint32_t srcCacheCount,
  13532. const VULKAN_HPP_NAMESPACE::ValidationCacheEXT * pSrcCaches,
  13533. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13534. {
  13535. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13536. return static_cast<Result>( d.vkMergeValidationCachesEXT(
  13537. m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  13538. }
  13538. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced overload: source caches supplied as an ArrayProxy; failures reported through
  // resultCheck (throwing in exception-enabled builds).
  13540. template <typename Dispatch>
  13541. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  13542. Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache,
  13543. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches,
  13544. Dispatch const & d ) const
  13545. {
  13546. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13547. VkResult result = d.vkMergeValidationCachesEXT(
  13548. m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) );
  13549. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
  13550. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  13551. }
  13551. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Raw wrapper around the two-call size/data query: with pData null the driver writes the required
  // size to *pDataSize; with pData non-null it writes up to *pDataSize bytes of cache data.
  13553. template <typename Dispatch>
  13554. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache,
  13555. size_t * pDataSize,
  13556. void * pData,
  13557. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13558. {
  13559. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13560. return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  13561. }
  13561. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced wrapper: returns the full validation-cache blob as a byte vector using the standard
  // two-call pattern (query size with nullptr, then fetch), retrying while the driver reports
  // VK_INCOMPLETE because the cache can grow between the two calls.
  13562. template <typename Uint8_tAllocator, typename Dispatch>
  13563. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  13564. Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
  13565. {
  13566. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13567. std::vector<uint8_t, Uint8_tAllocator> data;
  13568. size_t dataSize;
  13569. VkResult result;
  13570. do
  13571. {
  // First call: pData == nullptr, so only the current required size is written to dataSize.
  13572. result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
  13573. if ( ( result == VK_SUCCESS ) && dataSize )
  13574. {
  13575. data.resize( dataSize );
  // Second call: fetch the data; dataSize is updated to the amount actually written.
  13576. result =
  13577. d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
  13578. }
  13579. } while ( result == VK_INCOMPLETE );
  13580. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  13581. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  // Shrink if the final fetch returned fewer bytes than were reserved.
  13582. if ( dataSize < data.size() )
  13583. {
  13584. data.resize( dataSize );
  13585. }
  13586. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13587. }
  // Same two-call size/data loop as the overload above, but constructs the result vector with a
  // caller-supplied allocator instance. B1 is SFINAE-constrained (in the declaration) to allocators
  // whose value_type is uint8_t.
  13588. template <typename Uint8_tAllocator,
  13589. typename Dispatch,
  13590. typename B1,
  13591. typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
  13592. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
  13593. Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  13594. {
  13595. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13596. std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
  13597. size_t dataSize;
  13598. VkResult result;
  13599. do
  13600. {
  // Size query (pData == nullptr), then data fetch; retry while VK_INCOMPLETE.
  13601. result = d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr );
  13602. if ( ( result == VK_SUCCESS ) && dataSize )
  13603. {
  13604. data.resize( dataSize );
  13605. result =
  13606. d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
  13607. }
  13608. } while ( result == VK_INCOMPLETE );
  13609. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  13610. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  // Shrink if the final fetch returned fewer bytes than were reserved.
  13611. if ( dataSize < data.size() )
  13612. {
  13613. data.resize( dataSize );
  13614. }
  13615. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  13616. }
  13617. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  13618. //=== VK_NV_shading_rate_image ===
  // Records a command binding a shading-rate image (with its expected layout) to the command buffer.
  13620. template <typename Dispatch>
  13621. VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView,
  13622. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  13623. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13624. {
  13625. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13626. d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  13627. }
  // Raw wrapper: records a command setting shading-rate palettes for viewportCount viewports
  // starting at firstViewport.
  13628. template <typename Dispatch>
  13629. VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport,
  13630. uint32_t viewportCount,
  13631. const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV * pShadingRatePalettes,
  13632. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  13633. {
  13634. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13635. d.vkCmdSetViewportShadingRatePaletteNV(
  13636. m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
  13637. }
  13637. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode overload: takes an ArrayProxy of palettes; the element count is taken from
  /// the proxy (shadingRatePalettes.size()) instead of an explicit viewportCount.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV(
    uint32_t                                                                                   firstViewport,
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes,
    Dispatch const &                                                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportShadingRatePaletteNV(
      m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
  }
  13648. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// Thin wrapper over vkCmdSetCoarseSampleOrderNV: selects the coarse sample ordering, optionally
  /// with customSampleOrderCount custom orders. Pointer/count form.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV           sampleOrderType,
                                                                uint32_t                                                customSampleOrderCount,
                                                                const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV * pCustomSampleOrders,
                                                                Dispatch const &                                        d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrderCount,
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
  }
  13661. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode overload: custom sample orders supplied as an ArrayProxy; count is derived
  /// from the proxy instead of being passed explicitly.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV                                                 sampleOrderType,
                                           VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders,
                                           Dispatch const &                                                                              d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer,
                                   static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ),
                                   customSampleOrders.size(),
                                   reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  }
  13674. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  13675. //=== VK_NV_ray_tracing ===
  /// C-style wrapper over vkCreateAccelerationStructureNV (VK_NV_ray_tracing): writes the new
  /// handle through pAccelerationStructure and returns the raw Result; no exception is thrown.
  /// @param pAllocator optional host allocation callbacks (may be nullptr).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV * pCreateInfo,
                                           const VULKAN_HPP_NAMESPACE::AllocationCallbacks *               pAllocator,
                                           VULKAN_HPP_NAMESPACE::AccelerationStructureNV *                 pAccelerationStructure,
                                           Dispatch const &                                                d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device,
                                                                   reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ),
                                                                   reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                   reinterpret_cast<VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  }
  13689. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode createAccelerationStructureNV: takes references/Optional instead of pointers
  /// and returns the created handle; failures are reported through resultCheck (which throws in
  /// exception-enabled builds).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type
    Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                           Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    // Optional<> converts to a (possibly null) AllocationCallbacks pointer.
    VkResult result = d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), accelerationStructure );
  }
  13706. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  /// Unique-handle variant: as createAccelerationStructureNV, but the returned handle is wrapped
  /// in a UniqueHandle that destroys it (with the same allocator and dispatcher) on scope exit.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type
    Device::createAccelerationStructureNVUnique( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV & createInfo,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>       allocator,
                                                 Dispatch const &                                                d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
    VkResult result = d.vkCreateAccelerationStructureNV(
      m_device,
      reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique" );
    // ObjectDestroy captures this device, the allocator and the dispatcher for the deleter.
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( accelerationStructure, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  13725. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  13726. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// C-style wrapper over vkDestroyAccelerationStructureNV; pAllocator may be nullptr.
  /// Destroying a null handle is forwarded as-is to the C entry point.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                                                 const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                 Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  13736. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode destroy: same call, with the allocator passed as Optional<> and converted
  /// to a (possibly null) C allocation-callbacks pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV             accelerationStructure,
                                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                 Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  13748. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// Generic destroy() overload for AccelerationStructureNV handles — identical to
  /// destroyAccelerationStructureNV (pointer form); exists so generic code can call destroy().
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV     accelerationStructure,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const &                                  d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
  13758. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Generic destroy() overload for AccelerationStructureNV handles, enhanced (Optional allocator)
  /// form — identical to destroyAccelerationStructureNV (Optional form).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV             accelerationStructure,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyAccelerationStructureNV(
      m_device,
      static_cast<VkAccelerationStructureNV>( accelerationStructure ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
  13770. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// C-style wrapper over vkGetAccelerationStructureMemoryRequirementsNV: writes the requirements
  /// through pMemoryRequirements; no result code is produced by this query.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV * pInfo,
                                                          VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR *                              pMemoryRequirements,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  }
  13782. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode query: returns the MemoryRequirements2KHR by value instead of via out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return memoryRequirements;
  }
  /// StructureChain variant: fills the MemoryRequirements2KHR link of a caller-chosen structure
  /// chain, allowing extension structs chained off pNext to be populated by the same call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV & info,
                                                          Dispatch const &                                                            d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // The chain is filled in place through a reference to its MemoryRequirements2KHR element.
    VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
    d.vkGetAccelerationStructureMemoryRequirementsNV( m_device,
                                                      reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ),
                                                      reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
    return structureChain;
  }
  13808. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// C-style wrapper over vkBindAccelerationStructureMemoryNV: binds memory for bindInfoCount
  /// acceleration structures in one call; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV(
    uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  }
  13817. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode overload: bind infos supplied as an ArrayProxy; failures are routed through
  /// resultCheck, and the void-typed success value is produced by createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBindAccelerationStructureMemoryNV(
      m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  13828. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// Thin wrapper over vkCmdBuildAccelerationStructureNV: records a build (or, when update is
  /// true, an update of src into dst) using the given instance and scratch buffers. Pointer form.
  /// All handle/enum arguments are converted 1:1 to their C counterparts.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV * pInfo,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32                              update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
  13851. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  /// Enhanced-mode overload: identical to the pointer form, but the build info is taken by
  /// reference (so a null info is impossible at the API surface).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV & info,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              instanceData,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          instanceOffset,
                                                                      VULKAN_HPP_NAMESPACE::Bool32                              update,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             dst,
                                                                      VULKAN_HPP_NAMESPACE::AccelerationStructureNV             src,
                                                                      VULKAN_HPP_NAMESPACE::Buffer                              scratch,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize                          scratchOffset,
                                                                      Dispatch const &                                          d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdBuildAccelerationStructureNV( m_commandBuffer,
                                         reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ),
                                         static_cast<VkBuffer>( instanceData ),
                                         static_cast<VkDeviceSize>( instanceOffset ),
                                         static_cast<VkBool32>( update ),
                                         static_cast<VkAccelerationStructureNV>( dst ),
                                         static_cast<VkAccelerationStructureNV>( src ),
                                         static_cast<VkBuffer>( scratch ),
                                         static_cast<VkDeviceSize>( scratchOffset ) );
  }
  13874. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// Thin wrapper over vkCmdCopyAccelerationStructureNV: records a copy from src to dst using
  /// the given copy mode (note the mode enum is the KHR type, shared with VK_KHR_acceleration_structure).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV          dst,
                                                                     VULKAN_HPP_NAMESPACE::AccelerationStructureNV          src,
                                                                     VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode,
                                                                     Dispatch const &                                       d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdCopyAccelerationStructureNV( m_commandBuffer,
                                        static_cast<VkAccelerationStructureNV>( dst ),
                                        static_cast<VkAccelerationStructureNV>( src ),
                                        static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  }
  /// Thin wrapper over vkCmdTraceRaysNV: records a ray-tracing dispatch of width x height x depth
  /// rays, with separate shader-binding-table buffer/offset(/stride) triples for the raygen,
  /// miss, hit and callable shader groups. Purely a 1:1 forwarding of its arguments.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer     raygenShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::Buffer     missShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     hitShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride,
                                                     VULKAN_HPP_NAMESPACE::Buffer     callableShaderBindingTableBuffer,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset,
                                                     VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride,
                                                     uint32_t                         width,
                                                     uint32_t                         height,
                                                     uint32_t                         depth,
                                                     Dispatch const &                 d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdTraceRaysNV( m_commandBuffer,
                        static_cast<VkBuffer>( raygenShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( raygenShaderBindingOffset ),
                        static_cast<VkBuffer>( missShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( missShaderBindingOffset ),
                        static_cast<VkDeviceSize>( missShaderBindingStride ),
                        static_cast<VkBuffer>( hitShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( hitShaderBindingOffset ),
                        static_cast<VkDeviceSize>( hitShaderBindingStride ),
                        static_cast<VkBuffer>( callableShaderBindingTableBuffer ),
                        static_cast<VkDeviceSize>( callableShaderBindingOffset ),
                        static_cast<VkDeviceSize>( callableShaderBindingStride ),
                        width,
                        height,
                        depth );
  }
  /// C-style wrapper over vkCreateRayTracingPipelinesNV: creates createInfoCount pipelines into
  /// the caller-provided pPipelines array and returns the raw Result (which may be
  /// ePipelineCompileRequiredEXT as well as eSuccess — no translation is done here).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache                        pipelineCache,
                                                                                     uint32_t                                                   createInfoCount,
                                                                                     const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV * pCreateInfos,
                                                                                     const VULKAN_HPP_NAMESPACE::AllocationCallbacks *          pAllocator,
                                                                                     VULKAN_HPP_NAMESPACE::Pipeline *                           pPipelines,
                                                                                     Dispatch const &                                           d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device,
                                                                 static_cast<VkPipelineCache>( pipelineCache ),
                                                                 createInfoCount,
                                                                 reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ),
                                                                 reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                 reinterpret_cast<VkPipeline *>( pPipelines ) ) );
  }
  13937. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  13938. template <typename PipelineAllocator, typename Dispatch>
  13939. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  13940. Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13941. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13942. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13943. Dispatch const & d ) const
  13944. {
  13945. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13946. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  13947. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13948. m_device,
  13949. static_cast<VkPipelineCache>( pipelineCache ),
  13950. createInfos.size(),
  13951. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13952. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13953. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13954. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13955. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
  13956. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13957. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13958. }
  13959. template <typename PipelineAllocator,
  13960. typename Dispatch,
  13961. typename B0,
  13962. typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
  13963. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>
  13964. Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
  13965. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
  13966. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  13967. PipelineAllocator & pipelineAllocator,
  13968. Dispatch const & d ) const
  13969. {
  13970. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  13971. std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  13972. VkResult result = d.vkCreateRayTracingPipelinesNV(
  13973. m_device,
  13974. static_cast<VkPipelineCache>( pipelineCache ),
  13975. createInfos.size(),
  13976. reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
  13977. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  13978. reinterpret_cast<VkPipeline *>( pipelines.data() ) );
  13979. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  13980. VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV",
  13981. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  13982. return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
  13983. }
  /// Single-pipeline convenience: calls vkCreateRayTracingPipelinesNV with a count of 1 and
  /// returns the one created pipeline. eSuccess and ePipelineCompileRequiredEXT are accepted;
  /// the actual result is preserved in the returned ResultValue.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>
    Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache                         pipelineCache,
                                        const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>   allocator,
                                        Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
  }
  14005. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  /// Unique-handle variant: creates the pipelines, then wraps each raw handle in a UniqueHandle
  /// sharing one ObjectDestroy deleter (this device + allocator + dispatcher).
  /// Note: on partial failure after creation, ownership of already-created handles transfers to
  /// the UniqueHandles only if this point is reached; resultCheck may throw before wrapping.
  template <typename Dispatch, typename PipelineAllocator>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Raw handles are collected first, then wrapped; the temporary vector uses the default allocator.
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  /// Unique-handle variant with a caller-supplied allocator for the UniqueHandle vector
  /// (selected via the B0 enable_if overload guard); otherwise identical to the allocator-less
  /// Unique overload: create raw handles, then wrap each in a UniqueHandle with a shared deleter.
  template <typename Dispatch,
            typename PipelineAllocator,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>
    Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache,
                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos,
                                               Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                               PipelineAllocator & pipelineAllocator,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
    VkResult result = d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      createInfos.size(),
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( pipelines.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    // The UniqueHandle vector uses the caller's allocator.
    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
    uniquePipelines.reserve( createInfos.size() );
    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
    for ( auto const & pipeline : pipelines )
    {
      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
    }
    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
  }
  /// Single-pipeline unique-handle convenience: creates one pipeline (count 1) and returns it
  /// wrapped in a UniqueHandle that destroys it with this device, the allocator and dispatcher.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>
    Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache                         pipelineCache,
                                              const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>   allocator,
                                              Dispatch const &                                            d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
    VkResult result = d.vkCreateRayTracingPipelinesNV(
      m_device,
      static_cast<VkPipelineCache>( pipelineCache ),
      1,
      reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkPipeline *>( &pipeline ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  14091. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  14092. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  /// C-style wrapper over vkGetRayTracingShaderGroupHandlesNV: copies groupCount shader-group
  /// handles (starting at firstGroup) into the caller buffer pData of dataSize bytes; returns
  /// the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                                           uint32_t                       firstGroup,
                                                                                           uint32_t                       groupCount,
                                                                                           size_t                         dataSize,
                                                                                           void *                         pData,
                                                                                           Dispatch const &               d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  }
  14105. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14106. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  14107. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type Device::getRayTracingShaderGroupHandlesNV(
  14108. VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  14109. {
  14110. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14111. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  14112. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  14113. VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
  14114. m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
  14115. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  14116. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  14117. }
  /// Single-value convenience: fetches exactly sizeof( DataType ) bytes of shader-group handle
  /// data into a DataType and returns it. groupCount is forwarded as-is; the caller chooses a
  /// DataType sized to hold the requested handles.
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetRayTracingShaderGroupHandlesNV(
      m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  14129. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkGetAccelerationStructureHandleNV: writes dataSize bytes of the
  // acceleration structure's opaque handle into pData; returns the raw Result unchanged.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure,
                                                                                          size_t dataSize,
                                                                                          void * pData,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  }
  14140. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode wrapper: returns the acceleration structure handle as a vector of DataType.
  // dataSize must be an exact multiple of sizeof( DataType ) (asserted).
  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
    VkResult result = d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  // Convenience overload: reads exactly sizeof( DataType ) bytes of the handle into a single
  // DataType value (typically uint64_t).
  template <typename DataType, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
    Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    DataType data;
    VkResult result = d.vkGetAccelerationStructureHandleNV(
      m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( DataType ), reinterpret_cast<void *>( &data ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  }
  14164. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkCmdWriteAccelerationStructuresPropertiesNV: records a command that
  // writes queryType properties of the given acceleration structures into queryPool, one query
  // per structure starting at firstQuery.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount,
                                                                                 const VULKAN_HPP_NAMESPACE::AccelerationStructureNV * pAccelerationStructures,
                                                                                 VULKAN_HPP_NAMESPACE::QueryType queryType,
                                                                                 VULKAN_HPP_NAMESPACE::QueryPool queryPool,
                                                                                 uint32_t firstQuery,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructureCount,
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }
  14181. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: same command, taking the acceleration structures as an ArrayProxy
  // so the count is derived from the container instead of being passed explicitly.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV(
    VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures,
    VULKAN_HPP_NAMESPACE::QueryType queryType,
    VULKAN_HPP_NAMESPACE::QueryPool queryPool,
    uint32_t firstQuery,
    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer,
                                                    accelerationStructures.size(),
                                                    reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ),
                                                    static_cast<VkQueryType>( queryType ),
                                                    static_cast<VkQueryPool>( queryPool ),
                                                    firstQuery );
  }
  14198. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14199. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // C-style wrapper for vkCompileDeferredNV (only built when enhanced mode is disabled):
  // compiles the deferred shader at index `shader` of the given ray-tracing pipeline.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline,
                                                                           uint32_t shader,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  }
  14208. #else
  // Enhanced-mode wrapper: same call, but errors are routed through resultCheck (throws when
  // exceptions are enabled); returns void on success.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  14218. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14219. //=== VK_KHR_maintenance3 ===
  // C-style wrapper for vkGetDescriptorSetLayoutSupportKHR: queries whether the described
  // descriptor set layout can be created, writing the answer into *pSupport.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo,
                                                                   VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
  }
  14229. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the create info by reference and returns the support
  // structure by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return support;
  }
  // StructureChain overload: lets callers query extension structures chained off
  // DescriptorSetLayoutSupport; the driver fills the whole pNext chain in one call.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // The chain's DescriptorSetLayoutSupport element is filled in place; its pNext already
    // links the remaining chain elements for the driver to populate.
    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
    d.vkGetDescriptorSetLayoutSupportKHR(
      m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
    return structureChain;
  }
  14253. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14254. //=== VK_KHR_draw_indirect_count ===
  // Wrapper for vkCmdDrawIndirectCountKHR: records an indirect draw whose draw count is read
  // from countBuffer at countBufferOffset (clamped to maxDrawCount), with draw parameters
  // read from `buffer` starting at `offset`, `stride` bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                              VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                              uint32_t maxDrawCount,
                                                              uint32_t stride,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndirectCountKHR( m_commandBuffer,
                                 static_cast<VkBuffer>( buffer ),
                                 static_cast<VkDeviceSize>( offset ),
                                 static_cast<VkBuffer>( countBuffer ),
                                 static_cast<VkDeviceSize>( countBufferOffset ),
                                 maxDrawCount,
                                 stride );
  }
  // Wrapper for vkCmdDrawIndexedIndirectCountKHR: indexed variant of the buffer-sourced
  // indirect count draw above; same buffer/count semantics.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                     VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                     VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                     uint32_t maxDrawCount,
                                                                     uint32_t stride,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer,
                                        static_cast<VkBuffer>( buffer ),
                                        static_cast<VkDeviceSize>( offset ),
                                        static_cast<VkBuffer>( countBuffer ),
                                        static_cast<VkDeviceSize>( countBufferOffset ),
                                        maxDrawCount,
                                        stride );
  }
  14291. //=== VK_EXT_external_memory_host ===
  // C-style wrapper for vkGetMemoryHostPointerPropertiesEXT: queries which memory types can
  // import the host allocation at pHostPointer for the given external handle type.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void * pHostPointer,
                                               VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties,
                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device,
                                                                       static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                       pHostPointer,
                                                                       reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
  }
  14305. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the MemoryHostPointerPropertiesEXT by value and reports
  // errors through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type
    Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                               const void * pHostPointer,
                                               Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
    VkResult result = d.vkGetMemoryHostPointerPropertiesEXT( m_device,
                                                             static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                             pHostPointer,
                                                             reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryHostPointerProperties );
  }
  14321. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14322. //=== VK_AMD_buffer_marker ===
  // Wrapper for vkCmdWriteBufferMarkerAMD: records a command that writes the 32-bit `marker`
  // into dstBuffer at dstOffset once the given pipeline stage completes.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage,
                                                              VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
                                                              VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
                                                              uint32_t marker,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdWriteBufferMarkerAMD( m_commandBuffer,
                                 static_cast<VkPipelineStageFlagBits>( pipelineStage ),
                                 static_cast<VkBuffer>( dstBuffer ),
                                 static_cast<VkDeviceSize>( dstOffset ),
                                 marker );
  }
  14337. //=== VK_EXT_calibrated_timestamps ===
  // C-style wrapper for vkGetPhysicalDeviceCalibrateableTimeDomainsEXT: standard two-call
  // enumeration — pass pTimeDomains == nullptr to query the count.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t * pTimeDomainCount,
                                                                                                VULKAN_HPP_NAMESPACE::TimeDomainEXT * pTimeDomains,
                                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( pTimeDomains ) ) );
  }
  14347. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: performs the count/fill two-call pattern in a loop, retrying
  // on VK_INCOMPLETE (the set of domains can change between the two calls), then shrinks the
  // vector to the actual count before returning it.
  template <typename TimeDomainEXTAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
    uint32_t timeDomainCount;
    VkResult result;
    do
    {
      // First call gets the count; second fills the resized vector.
      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
      if ( ( result == VK_SUCCESS ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result =
          d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
    if ( timeDomainCount < timeDomains.size() )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
  }
  // Allocator-aware variant of the enumeration above; identical logic, but the result vector
  // is constructed with the caller-supplied allocator.
  template <typename TimeDomainEXTAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, TimeDomainEXT>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator>>::type
    PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
    uint32_t timeDomainCount;
    VkResult result;
    do
    {
      // Two-call enumeration, retried while the implementation reports VK_INCOMPLETE.
      result = d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr );
      if ( ( result == VK_SUCCESS ) && timeDomainCount )
      {
        timeDomains.resize( timeDomainCount );
        result =
          d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) );
      }
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
    VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
    if ( timeDomainCount < timeDomains.size() )
    {
      timeDomains.resize( timeDomainCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), timeDomains );
  }
  14403. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkGetCalibratedTimestampsEXT: samples `timestampCount` time domains
  // into pTimestamps and writes the maximum sampling deviation into *pMaxDeviation.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount,
                                                                                    const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT * pTimestampInfos,
                                                                                    uint64_t * pTimestamps,
                                                                                    uint64_t * pMaxDeviation,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetCalibratedTimestampsEXT(
      m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
  }
  14415. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns a pair of (one timestamp per requested info, maximum
  // deviation). The pair is built up front so both outputs can be filled in place.
  template <typename Uint64_tAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // piecewise_construct sizes the timestamp vector to match timestampInfos and zero-inits
    // the deviation member.
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
    uint64_t &                                 maxDeviation = data_.second;
    VkResult result = d.vkGetCalibratedTimestampsEXT(
      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
  // Allocator-aware variant of the overload above; the timestamp vector is constructed with
  // the caller-supplied allocator, otherwise identical.
  template <typename Uint64_tAllocator,
            typename Dispatch,
            typename B0,
            typename std::enable_if<std::is_same<typename B0::value_type, uint64_t>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type
    Device::getCalibratedTimestampsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos,
                                        Uint64_tAllocator &                                                                              uint64_tAllocator,
                                        Dispatch const &                                                                                 d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t> data_(
      std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
    std::vector<uint64_t, Uint64_tAllocator> & timestamps = data_.first;
    uint64_t &                                 maxDeviation = data_.second;
    VkResult result = d.vkGetCalibratedTimestampsEXT(
      m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
  // Single-domain convenience: samples exactly one calibrated timestamp and returns
  // (timestamp, maxDeviation) as a pair.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<uint64_t, uint64_t>>::type
    Device::getCalibratedTimestampEXT( const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT & timestampInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::pair<uint64_t, uint64_t> data_;
    uint64_t &                    timestamp    = data_.first;
    uint64_t &                    maxDeviation = data_.second;
    VkResult result =
      d.vkGetCalibratedTimestampsEXT( m_device, 1, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( &timestampInfo ), &timestamp, &maxDeviation );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  }
  14463. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14464. //=== VK_NV_mesh_shader ===
  // Wrapper for vkCmdDrawMeshTasksNV: records a mesh-shading draw of taskCount task workgroups
  // beginning at firstTask.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  }
  // Wrapper for vkCmdDrawMeshTasksIndirectNV: records drawCount mesh-task draws whose
  // parameters are read from `buffer` starting at `offset`, `stride` bytes apart.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                 VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                 uint32_t drawCount,
                                                                 uint32_t stride,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  }
  // Wrapper for vkCmdDrawMeshTasksIndirectCountNV: like drawMeshTasksIndirectNV, but the draw
  // count is read from countBuffer at countBufferOffset and clamped to maxDrawCount.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize offset,
                                                                      VULKAN_HPP_NAMESPACE::Buffer countBuffer,
                                                                      VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
                                                                      uint32_t maxDrawCount,
                                                                      uint32_t stride,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer,
                                         static_cast<VkBuffer>( buffer ),
                                         static_cast<VkDeviceSize>( offset ),
                                         static_cast<VkBuffer>( countBuffer ),
                                         static_cast<VkDeviceSize>( countBufferOffset ),
                                         maxDrawCount,
                                         stride );
  }
  14499. //=== VK_NV_scissor_exclusive ===
  // C-style wrapper for vkCmdSetExclusiveScissorEnableNV: enables/disables exclusive scissors
  // [firstExclusiveScissor, firstExclusiveScissor + exclusiveScissorCount).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
                                                                     uint32_t exclusiveScissorCount,
                                                                     const VULKAN_HPP_NAMESPACE::Bool32 * pExclusiveScissorEnables,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorEnableNV(
      m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkBool32 *>( pExclusiveScissorEnables ) );
  }
  14510. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the enable flags are passed as an ArrayProxy; the count is taken
  // from the container.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setExclusiveScissorEnableNV( uint32_t firstExclusiveScissor,
                                                VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & exclusiveScissorEnables,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorEnableNV(
      m_commandBuffer, firstExclusiveScissor, exclusiveScissorEnables.size(), reinterpret_cast<const VkBool32 *>( exclusiveScissorEnables.data() ) );
  }
  14521. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkCmdSetExclusiveScissorNV: sets the exclusive scissor rectangles
  // [firstExclusiveScissor, firstExclusiveScissor + exclusiveScissorCount).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
                                                               uint32_t exclusiveScissorCount,
                                                               const VULKAN_HPP_NAMESPACE::Rect2D * pExclusiveScissors,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  }
  14531. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: scissor rectangles passed as an ArrayProxy; count derived from it.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor,
                                                               VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetExclusiveScissorNV(
      m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
  }
  14541. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14542. //=== VK_NV_device_diagnostic_checkpoints ===
  // Wrapper for vkCmdSetCheckpointNV: attaches an opaque checkpoint marker to the command
  // buffer; the marker pointer can be retrieved later via Queue::getCheckpointDataNV.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void * pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
  }
  14549. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Typed convenience overload: passes the address of a caller-owned marker object. Note the
  // driver stores only the pointer, so checkpointMarker must outlive the command buffer use.
  template <typename CheckpointMarkerType, typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( CheckpointMarkerType const & checkpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCheckpointNV( m_commandBuffer, reinterpret_cast<const void *>( &checkpointMarker ) );
  }
  14556. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkGetQueueCheckpointDataNV: two-call enumeration of checkpoint data
  // recorded on this queue (pass pCheckpointData == nullptr to query the count).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t * pCheckpointDataCount,
                                                     VULKAN_HPP_NAMESPACE::CheckpointDataNV * pCheckpointData,
                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( pCheckpointData ) );
  }
  14565. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode enumeration: count + fill in two calls (vkGetQueueCheckpointDataNV returns
  // void, so no VK_INCOMPLETE retry loop is needed), then trims the vector to the count
  // actually written.
  template <typename CheckpointDataNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
    uint32_t                                                                       checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }
  // Allocator-aware variant of the enumeration above; identical logic with the result vector
  // constructed from the caller-supplied allocator.
  template <typename CheckpointDataNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, CheckpointDataNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator>
    Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
    uint32_t                                                                       checkpointDataCount;
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
    checkpointData.resize( checkpointDataCount );
    d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
    VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
    if ( checkpointDataCount < checkpointData.size() )
    {
      checkpointData.resize( checkpointDataCount );
    }
    return checkpointData;
  }
  14603. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14604. //=== VK_KHR_timeline_semaphore ===
  // C-style wrapper for vkGetSemaphoreCounterValueKHR: reads the current counter value of a
  // timeline semaphore into *pValue.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore,
                                                                                     uint64_t * pValue,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  }
  14613. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the timeline semaphore counter value directly; errors go
  // through resultCheck/createResultValueType.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type
    Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    uint64_t value;
    VkResult result = d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  }
  14624. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // C-style wrapper for vkWaitSemaphoresKHR: blocks until the wait condition in *pWaitInfo is
  // satisfied or `timeout` nanoseconds elapse; returns the raw Result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo,
                                                                           uint64_t timeout,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  }
  14633. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: eSuccess and eTimeout are both treated as non-error outcomes and
  // returned to the caller; any other result is reported through resultCheck.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
    Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  }
  14645. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14646. template <typename Dispatch>
  14647. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo,
  14648. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14649. {
  14650. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14651. return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  14652. }
  14653. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14654. template <typename Dispatch>
  14655. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14656. Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  14657. {
  14658. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14659. VkResult result = d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
  14660. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
  14661. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14662. }
  14663. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14664. //=== VK_INTEL_performance_query ===
  14665. template <typename Dispatch>
  14666. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL(
  14667. const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL * pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14668. {
  14669. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14670. return static_cast<Result>(
  14671. d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
  14672. }
  14673. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14674. template <typename Dispatch>
  14675. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14676. Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
  14677. {
  14678. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14679. VkResult result = d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) );
  14680. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
  14681. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14682. }
  14683. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14684. template <typename Dispatch>
  14685. VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14686. {
  14687. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14688. d.vkUninitializePerformanceApiINTEL( m_device );
  14689. }
  14690. template <typename Dispatch>
  14691. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL * pMarkerInfo,
  14692. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14693. {
  14694. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14695. return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  14696. }
  14697. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14698. template <typename Dispatch>
  14699. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14700. CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  14701. {
  14702. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14703. VkResult result = d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) );
  14704. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
  14705. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14706. }
  14707. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14708. template <typename Dispatch>
  14709. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL(
  14710. const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14711. {
  14712. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14713. return static_cast<Result>(
  14714. d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  14715. }
  14716. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14717. template <typename Dispatch>
  14718. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14719. CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  14720. {
  14721. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14722. VkResult result = d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) );
  14723. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  14724. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14725. }
  14726. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14727. template <typename Dispatch>
  14728. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL(
  14729. const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL * pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14730. {
  14731. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14732. return static_cast<Result>(
  14733. d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  14734. }
  14735. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14736. template <typename Dispatch>
  14737. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14738. CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
  14739. {
  14740. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14741. VkResult result = d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) );
  14742. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
  14743. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14744. }
  14745. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14746. template <typename Dispatch>
  14747. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14748. Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL * pAcquireInfo,
  14749. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL * pConfiguration,
  14750. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14751. {
  14752. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14753. return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14754. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ),
  14755. reinterpret_cast<VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  14756. }
  14757. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14758. template <typename Dispatch>
  14759. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type
  14760. Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
  14761. {
  14762. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14763. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  14764. VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14765. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
  14766. reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
  14767. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
  14768. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), configuration );
  14769. }
  14770. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14771. template <typename Dispatch>
  14772. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type
  14773. Device::acquirePerformanceConfigurationINTELUnique( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL & acquireInfo,
  14774. Dispatch const & d ) const
  14775. {
  14776. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14777. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  14778. VkResult result = d.vkAcquirePerformanceConfigurationINTEL( m_device,
  14779. reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ),
  14780. reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) );
  14781. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique" );
  14782. return createResultValueType(
  14783. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14784. UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( configuration, ObjectRelease<Device, Dispatch>( *this, d ) ) );
  14785. }
  14786. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  14787. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14788. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14789. template <typename Dispatch>
  14790. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14791. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14792. {
  14793. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14794. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14795. }
  14796. #else
  14797. template <typename Dispatch>
  14798. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14799. Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14800. {
  14801. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14802. VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14803. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
  14804. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14805. }
  14806. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14807. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14808. template <typename Dispatch>
  14809. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14810. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14811. {
  14812. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14813. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14814. }
  14815. #else
  14816. template <typename Dispatch>
  14817. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14818. Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14819. {
  14820. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14821. VkResult result = d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14822. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
  14823. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14824. }
  14825. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14826. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14827. template <typename Dispatch>
  14828. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration,
  14829. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14830. {
  14831. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14832. return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  14833. }
  14834. #else
  14835. template <typename Dispatch>
  14836. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  14837. Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  14838. {
  14839. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14840. VkResult result = d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) );
  14841. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
  14842. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  14843. }
  14844. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  14845. template <typename Dispatch>
  14846. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter,
  14847. VULKAN_HPP_NAMESPACE::PerformanceValueINTEL * pValue,
  14848. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14849. {
  14850. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14851. return static_cast<Result>( d.vkGetPerformanceParameterINTEL(
  14852. m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( pValue ) ) );
  14853. }
  14854. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14855. template <typename Dispatch>
  14856. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type
  14857. Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
  14858. {
  14859. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14860. VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
  14861. VkResult result = d.vkGetPerformanceParameterINTEL(
  14862. m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) );
  14863. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
  14864. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
  14865. }
  14866. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14867. //=== VK_AMD_display_native_hdr ===
  14868. template <typename Dispatch>
  14869. VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain,
  14870. VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable,
  14871. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14872. {
  14873. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14874. d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  14875. }
  14876. #if defined( VK_USE_PLATFORM_FUCHSIA )
  14877. //=== VK_FUCHSIA_imagepipe_surface ===
  14878. template <typename Dispatch>
  14879. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14880. Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo,
  14881. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  14882. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  14883. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14884. {
  14885. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14886. return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance,
  14887. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ),
  14888. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  14889. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  14890. }
  14891. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14892. template <typename Dispatch>
  14893. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  14894. Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
  14895. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14896. Dispatch const & d ) const
  14897. {
  14898. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14899. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14900. VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
  14901. m_instance,
  14902. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
  14903. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14904. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14905. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
  14906. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  14907. }
  14908. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14909. template <typename Dispatch>
  14910. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  14911. Instance::createImagePipeSurfaceFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA & createInfo,
  14912. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14913. Dispatch const & d ) const
  14914. {
  14915. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14916. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14917. VkResult result = d.vkCreateImagePipeSurfaceFUCHSIA(
  14918. m_instance,
  14919. reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ),
  14920. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14921. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14922. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique" );
  14923. return createResultValueType(
  14924. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14925. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  14926. }
  14927. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  14928. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14929. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  14930. #if defined( VK_USE_PLATFORM_METAL_EXT )
  14931. //=== VK_EXT_metal_surface ===
  14932. template <typename Dispatch>
  14933. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT * pCreateInfo,
  14934. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  14935. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  14936. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14937. {
  14938. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14939. return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance,
  14940. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ),
  14941. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  14942. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  14943. }
  14944. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14945. template <typename Dispatch>
  14946. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  14947. Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
  14948. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14949. Dispatch const & d ) const
  14950. {
  14951. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14952. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14953. VkResult result =
  14954. d.vkCreateMetalSurfaceEXT( m_instance,
  14955. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
  14956. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14957. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14958. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
  14959. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  14960. }
  14961. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  14962. template <typename Dispatch>
  14963. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  14964. Instance::createMetalSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT & createInfo,
  14965. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  14966. Dispatch const & d ) const
  14967. {
  14968. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14969. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  14970. VkResult result =
  14971. d.vkCreateMetalSurfaceEXT( m_instance,
  14972. reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ),
  14973. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  14974. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  14975. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique" );
  14976. return createResultValueType(
  14977. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  14978. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  14979. }
  14980. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  14981. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  14982. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  14983. //=== VK_KHR_fragment_shading_rate ===
  14984. template <typename Dispatch>
  14985. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  14986. PhysicalDevice::getFragmentShadingRatesKHR( uint32_t * pFragmentShadingRateCount,
  14987. VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates,
  14988. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  14989. {
  14990. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  14991. return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  14992. m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  14993. }
  14994. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  14995. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
  14996. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  14997. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
  14998. PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  14999. {
  15000. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15001. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
  15002. uint32_t fragmentShadingRateCount;
  15003. VkResult result;
  15004. do
  15005. {
  15006. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
  15007. if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
  15008. {
  15009. fragmentShadingRates.resize( fragmentShadingRateCount );
  15010. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  15011. m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
  15012. }
  15013. } while ( result == VK_INCOMPLETE );
  15014. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  15015. VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
  15016. if ( fragmentShadingRateCount < fragmentShadingRates.size() )
  15017. {
  15018. fragmentShadingRates.resize( fragmentShadingRateCount );
  15019. }
  15020. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
  15021. }
  15022. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator,
  15023. typename Dispatch,
  15024. typename B1,
  15025. typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type>
  15026. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15027. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type
  15028. PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator,
  15029. Dispatch const & d ) const
  15030. {
  15031. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15032. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates(
  15033. physicalDeviceFragmentShadingRateKHRAllocator );
  15034. uint32_t fragmentShadingRateCount;
  15035. VkResult result;
  15036. do
  15037. {
  15038. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr );
  15039. if ( ( result == VK_SUCCESS ) && fragmentShadingRateCount )
  15040. {
  15041. fragmentShadingRates.resize( fragmentShadingRateCount );
  15042. result = d.vkGetPhysicalDeviceFragmentShadingRatesKHR(
  15043. m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) );
  15044. }
  15045. } while ( result == VK_INCOMPLETE );
  15046. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getFragmentShadingRatesKHR" );
  15047. VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
  15048. if ( fragmentShadingRateCount < fragmentShadingRates.size() )
  15049. {
  15050. fragmentShadingRates.resize( fragmentShadingRateCount );
  15051. }
  15052. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fragmentShadingRates );
  15053. }
  15054. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15055. template <typename Dispatch>
  15056. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D * pFragmentSize,
  15057. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
  15058. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15059. {
  15060. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15061. d.vkCmdSetFragmentShadingRateKHR(
  15062. m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  15063. }
  15064. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15065. template <typename Dispatch>
  15066. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D & fragmentSize,
  15067. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
  15068. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15069. {
  15070. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15071. d.vkCmdSetFragmentShadingRateKHR(
  15072. m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  15073. }
  15074. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15075. //=== VK_EXT_buffer_device_address ===
  15076. template <typename Dispatch>
  15077. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  15078. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15079. {
  15080. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15081. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  15082. }
  15083. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15084. template <typename Dispatch>
  15085. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  15086. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15087. {
  15088. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15089. VkDeviceAddress result = d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  15090. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  15091. }
  15092. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15093. //=== VK_EXT_tooling_info ===
  15094. template <typename Dispatch>
  15095. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t * pToolCount,
  15096. VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties,
  15097. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15098. {
  15099. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15100. return static_cast<Result>(
  15101. d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
  15102. }
  15103. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15104. template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
  15105. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15106. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
  15107. PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
  15108. {
  15109. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15110. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
  15111. uint32_t toolCount;
  15112. VkResult result;
  15113. do
  15114. {
  15115. result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
  15116. if ( ( result == VK_SUCCESS ) && toolCount )
  15117. {
  15118. toolProperties.resize( toolCount );
  15119. result =
  15120. d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
  15121. }
  15122. } while ( result == VK_INCOMPLETE );
  15123. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  15124. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  15125. if ( toolCount < toolProperties.size() )
  15126. {
  15127. toolProperties.resize( toolCount );
  15128. }
  15129. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  15130. }
  15131. template <typename PhysicalDeviceToolPropertiesAllocator,
  15132. typename Dispatch,
  15133. typename B1,
  15134. typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
  15135. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15136. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type
  15137. PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const
  15138. {
  15139. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15140. std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties(
  15141. physicalDeviceToolPropertiesAllocator );
  15142. uint32_t toolCount;
  15143. VkResult result;
  15144. do
  15145. {
  15146. result = d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr );
  15147. if ( ( result == VK_SUCCESS ) && toolCount )
  15148. {
  15149. toolProperties.resize( toolCount );
  15150. result =
  15151. d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
  15152. }
  15153. } while ( result == VK_INCOMPLETE );
  15154. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolPropertiesEXT" );
  15155. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  15156. if ( toolCount < toolProperties.size() )
  15157. {
  15158. toolProperties.resize( toolCount );
  15159. }
  15160. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
  15161. }
  15162. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15163. //=== VK_KHR_present_wait ===
  15164. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15165. template <typename Dispatch>
  15166. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  15167. uint64_t presentId,
  15168. uint64_t timeout,
  15169. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15170. {
  15171. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15172. return static_cast<Result>( d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout ) );
  15173. }
  15174. #else
  15175. template <typename Dispatch>
  15176. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  15177. Device::waitForPresentKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t presentId, uint64_t timeout, Dispatch const & d ) const
  15178. {
  15179. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15180. VkResult result = d.vkWaitForPresentKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), presentId, timeout );
  15181. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15182. VULKAN_HPP_NAMESPACE_STRING "::Device::waitForPresentKHR",
  15183. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  15184. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  15185. }
  15186. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15187. //=== VK_NV_cooperative_matrix ===
  15188. template <typename Dispatch>
  15189. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV(
  15190. uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15191. {
  15192. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15193. return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
  15194. m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  15195. }
  15196. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15197. template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  15198. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15199. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
  15200. PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  15201. {
  15202. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15203. std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
  15204. uint32_t propertyCount;
  15205. VkResult result;
  15206. do
  15207. {
  15208. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
  15209. if ( ( result == VK_SUCCESS ) && propertyCount )
  15210. {
  15211. properties.resize( propertyCount );
  15212. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
  15213. m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
  15214. }
  15215. } while ( result == VK_INCOMPLETE );
  15216. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  15217. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  15218. if ( propertyCount < properties.size() )
  15219. {
  15220. properties.resize( propertyCount );
  15221. }
  15222. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  15223. }
  15224. template <typename CooperativeMatrixPropertiesNVAllocator,
  15225. typename Dispatch,
  15226. typename B1,
  15227. typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesNV>::value, int>::type>
  15228. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15229. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type
  15230. PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator,
  15231. Dispatch const & d ) const
  15232. {
  15233. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15234. std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties(
  15235. cooperativeMatrixPropertiesNVAllocator );
  15236. uint32_t propertyCount;
  15237. VkResult result;
  15238. do
  15239. {
  15240. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr );
  15241. if ( ( result == VK_SUCCESS ) && propertyCount )
  15242. {
  15243. properties.resize( propertyCount );
  15244. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
  15245. m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) );
  15246. }
  15247. } while ( result == VK_INCOMPLETE );
  15248. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  15249. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  15250. if ( propertyCount < properties.size() )
  15251. {
  15252. properties.resize( propertyCount );
  15253. }
  15254. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  15255. }
  15256. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15257. //=== VK_NV_coverage_reduction_mode ===
  15258. template <typename Dispatch>
  15259. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
  15260. uint32_t * pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV * pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15261. {
  15262. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15263. return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
  15264. m_physicalDevice, pCombinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  15265. }
  15266. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15267. template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  15268. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15269. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
  15270. PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  15271. {
  15272. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15273. std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
  15274. uint32_t combinationCount;
  15275. VkResult result;
  15276. do
  15277. {
  15278. result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
  15279. if ( ( result == VK_SUCCESS ) && combinationCount )
  15280. {
  15281. combinations.resize( combinationCount );
  15282. result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
  15283. m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
  15284. }
  15285. } while ( result == VK_INCOMPLETE );
  15286. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15287. VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  15288. VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
  15289. if ( combinationCount < combinations.size() )
  15290. {
  15291. combinations.resize( combinationCount );
  15292. }
  15293. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  15294. }
  15295. template <typename FramebufferMixedSamplesCombinationNVAllocator,
  15296. typename Dispatch,
  15297. typename B1,
  15298. typename std::enable_if<std::is_same<typename B1::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type>
  15299. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  15300. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type
  15301. PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV(
  15302. FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
  15303. {
  15304. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15305. std::vector<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations(
  15306. framebufferMixedSamplesCombinationNVAllocator );
  15307. uint32_t combinationCount;
  15308. VkResult result;
  15309. do
  15310. {
  15311. result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr );
  15312. if ( ( result == VK_SUCCESS ) && combinationCount )
  15313. {
  15314. combinations.resize( combinationCount );
  15315. result = d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
  15316. m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) );
  15317. }
  15318. } while ( result == VK_INCOMPLETE );
  15319. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15320. VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  15321. VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
  15322. if ( combinationCount < combinations.size() )
  15323. {
  15324. combinations.resize( combinationCount );
  15325. }
  15326. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), combinations );
  15327. }
  15328. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15329. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  15330. //=== VK_EXT_full_screen_exclusive ===
  15331. template <typename Dispatch>
  15332. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  15333. PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
  15334. uint32_t * pPresentModeCount,
  15335. VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes,
  15336. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15337. {
  15338. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15339. return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
  15340. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ),
  15341. pPresentModeCount,
  15342. reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) );
  15343. }
  15344. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15345. template <typename PresentModeKHRAllocator, typename Dispatch>
  15346. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
  15347. PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  15348. {
  15349. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15350. std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes;
  15351. uint32_t presentModeCount;
  15352. VkResult result;
  15353. do
  15354. {
  15355. result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
  15356. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
  15357. if ( ( result == VK_SUCCESS ) && presentModeCount )
  15358. {
  15359. presentModes.resize( presentModeCount );
  15360. result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
  15361. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  15362. &presentModeCount,
  15363. reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
  15364. }
  15365. } while ( result == VK_INCOMPLETE );
  15366. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  15367. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  15368. if ( presentModeCount < presentModes.size() )
  15369. {
  15370. presentModes.resize( presentModeCount );
  15371. }
  15372. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  15373. }
  15374. template <typename PresentModeKHRAllocator,
  15375. typename Dispatch,
  15376. typename B1,
  15377. typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type>
  15378. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type
  15379. PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo,
  15380. PresentModeKHRAllocator & presentModeKHRAllocator,
  15381. Dispatch const & d ) const
  15382. {
  15383. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15384. std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
  15385. uint32_t presentModeCount;
  15386. VkResult result;
  15387. do
  15388. {
  15389. result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT(
  15390. m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr );
  15391. if ( ( result == VK_SUCCESS ) && presentModeCount )
  15392. {
  15393. presentModes.resize( presentModeCount );
  15394. result = d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice,
  15395. reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ),
  15396. &presentModeCount,
  15397. reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) );
  15398. }
  15399. } while ( result == VK_INCOMPLETE );
  15400. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModes2EXT" );
  15401. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  15402. if ( presentModeCount < presentModes.size() )
  15403. {
  15404. presentModes.resize( presentModeCount );
  15405. }
  15406. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes );
  15407. }
  15408. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15409. # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15410. template <typename Dispatch>
  15411. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  15412. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15413. {
  15414. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15415. return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  15416. }
  15417. # else
  15418. template <typename Dispatch>
  15419. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  15420. Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  15421. {
  15422. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15423. VkResult result = d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
  15424. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
  15425. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  15426. }
  15427. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15428. # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15429. template <typename Dispatch>
  15430. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  15431. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15432. {
  15433. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15434. return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  15435. }
  15436. # else
  15437. template <typename Dispatch>
  15438. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  15439. Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  15440. {
  15441. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15442. VkResult result = d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) );
  15443. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
  15444. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  15445. }
  15446. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15447. template <typename Dispatch>
  15448. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  15449. Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo,
  15450. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes,
  15451. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15452. {
  15453. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15454. return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT(
  15455. m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  15456. }
  15457. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15458. template <typename Dispatch>
  15459. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type
  15460. Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  15461. {
  15462. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15463. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  15464. VkResult result = d.vkGetDeviceGroupSurfacePresentModes2EXT(
  15465. m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) );
  15466. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
  15467. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes );
  15468. }
  15469. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15470. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  15471. //=== VK_EXT_headless_surface ===
  15472. template <typename Dispatch>
  15473. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT * pCreateInfo,
  15474. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  15475. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  15476. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15477. {
  15478. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15479. return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance,
  15480. reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ),
  15481. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  15482. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  15483. }
  15484. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15485. template <typename Dispatch>
  15486. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  15487. Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
  15488. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  15489. Dispatch const & d ) const
  15490. {
  15491. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15492. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  15493. VkResult result = d.vkCreateHeadlessSurfaceEXT(
  15494. m_instance,
  15495. reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
  15496. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  15497. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  15498. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
  15499. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  15500. }
  15501. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  15502. template <typename Dispatch>
  15503. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  15504. Instance::createHeadlessSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT & createInfo,
  15505. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  15506. Dispatch const & d ) const
  15507. {
  15508. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15509. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  15510. VkResult result = d.vkCreateHeadlessSurfaceEXT(
  15511. m_instance,
  15512. reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ),
  15513. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  15514. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  15515. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique" );
  15516. return createResultValueType(
  15517. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  15518. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  15519. }
  15520. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  15521. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15522. //=== VK_KHR_buffer_device_address ===
  15523. template <typename Dispatch>
  15524. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  15525. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15526. {
  15527. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15528. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  15529. }
  15530. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15531. template <typename Dispatch>
  15532. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  15533. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15534. {
  15535. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15536. VkDeviceAddress result = d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  15537. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  15538. }
  15539. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15540. template <typename Dispatch>
  15541. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo,
  15542. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15543. {
  15544. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15545. return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  15546. }
  15547. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15548. template <typename Dispatch>
  15549. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info,
  15550. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15551. {
  15552. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15553. uint64_t result = d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  15554. return result;
  15555. }
  15556. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15557. template <typename Dispatch>
  15558. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo,
  15559. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15560. {
  15561. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15562. return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  15563. }
  15564. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  15565. template <typename Dispatch>
  15566. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info,
  15567. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15568. {
  15569. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15570. uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  15571. return result;
  15572. }
  15573. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  15574. //=== VK_EXT_line_rasterization ===
  15575. template <typename Dispatch>
  15576. VULKAN_HPP_INLINE void
  15577. CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15578. {
  15579. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15580. d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
  15581. }
  15582. //=== VK_EXT_host_query_reset ===
  15583. template <typename Dispatch>
  15584. VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  15585. uint32_t firstQuery,
  15586. uint32_t queryCount,
  15587. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15588. {
  15589. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15590. d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  15591. }
  15592. //=== VK_EXT_extended_dynamic_state ===
  15593. template <typename Dispatch>
  15594. VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15595. {
  15596. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15597. d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
  15598. }
  15599. template <typename Dispatch>
  15600. VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  15601. {
  15602. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  15603. d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
  15604. }
// Dynamically sets the primitive topology (vkCmdSetPrimitiveTopologyEXT,
// VK_EXT_extended_dynamic_state).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
}
// Dynamically sets both the viewport count and the viewports themselves
// (vkCmdSetViewportWithCountEXT).  pViewports must point to viewportCount
// Viewport structures; the reinterpret_cast is valid because the C++ struct
// is layout-compatible with VkViewport.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount,
                                                               const VULKAN_HPP_NAMESPACE::Viewport * pViewports,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: the viewport count is derived from the ArrayProxy,
// removing the count/pointer mismatch possible in the raw overload.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Dynamically sets both the scissor count and the scissor rectangles
// (vkCmdSetScissorWithCountEXT).  pScissors must point to scissorCount Rect2D
// structures.
template <typename Dispatch>
VULKAN_HPP_INLINE void
  CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: the scissor count is derived from the ArrayProxy.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Binds bindingCount vertex buffers starting at firstBinding, with per-buffer
// offsets and (optionally) sizes and strides (vkCmdBindVertexBuffers2EXT,
// VK_EXT_extended_dynamic_state).  pBuffers and pOffsets must each point to
// bindingCount elements; pSizes/pStrides are forwarded as-is (the C API allows
// them to be null — not validated here).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                             uint32_t bindingCount,
                                                             const VULKAN_HPP_NAMESPACE::Buffer * pBuffers,
                                                             const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
                                                             const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes,
                                                             const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                firstBinding,
                                bindingCount,
                                reinterpret_cast<const VkBuffer *>( pBuffers ),
                                reinterpret_cast<const VkDeviceSize *>( pOffsets ),
                                reinterpret_cast<const VkDeviceSize *>( pSizes ),
                                reinterpret_cast<const VkDeviceSize *>( pStrides ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking ArrayProxy ranges.  Preconditions enforced
// here: offsets must match buffers in size, and sizes/strides must each be
// empty or match buffers in size.  With VULKAN_HPP_NO_EXCEPTIONS the checks
// are asserts; otherwise a mismatch throws LogicError.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding,
                                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers,
                                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
                                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes,
                                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides,
                                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
  VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
  VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
#  else
  if ( buffers.size() != offsets.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
  }
  if ( !sizes.empty() && buffers.size() != sizes.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
  }
  if ( !strides.empty() && buffers.size() != strides.size() )
  {
    throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
  }
#  endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  // Empty sizes/strides proxies forward a null pointer to the C API.
  d.vkCmdBindVertexBuffers2EXT( m_commandBuffer,
                                firstBinding,
                                buffers.size(),
                                reinterpret_cast<const VkBuffer *>( buffers.data() ),
                                reinterpret_cast<const VkDeviceSize *>( offsets.data() ),
                                reinterpret_cast<const VkDeviceSize *>( sizes.data() ),
                                reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Dynamically enables/disables the depth test (vkCmdSetDepthTestEnableEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
}
// Dynamically enables/disables depth writes (vkCmdSetDepthWriteEnableEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
}
// Dynamically sets the depth comparison operator (vkCmdSetDepthCompareOpEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
}
// Dynamically enables/disables the depth-bounds test
// (vkCmdSetDepthBoundsTestEnableEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
}
// Dynamically enables/disables the stencil test (vkCmdSetStencilTestEnableEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
}
// Dynamically sets the stencil fail/pass/depth-fail operations and the stencil
// compare operator for the faces selected by faceMask (vkCmdSetStencilOpEXT).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask,
                                                       VULKAN_HPP_NAMESPACE::StencilOp failOp,
                                                       VULKAN_HPP_NAMESPACE::StencilOp passOp,
                                                       VULKAN_HPP_NAMESPACE::StencilOp depthFailOp,
                                                       VULKAN_HPP_NAMESPACE::CompareOp compareOp,
                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetStencilOpEXT( m_commandBuffer,
                          static_cast<VkStencilFaceFlags>( faceMask ),
                          static_cast<VkStencilOp>( failOp ),
                          static_cast<VkStencilOp>( passOp ),
                          static_cast<VkStencilOp>( depthFailOp ),
                          static_cast<VkCompareOp>( compareOp ) );
}
  15747. //=== VK_KHR_deferred_host_operations ===
// Creates a deferred-operation object (vkCreateDeferredOperationKHR,
// VK_KHR_deferred_host_operations).  pAllocator may be null; the handle is
// written through pDeferredOperation.  Returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                                  VULKAN_HPP_NAMESPACE::DeferredOperationKHR * pDeferredOperation,
                                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateDeferredOperationKHR(
    m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeferredOperationKHR *>( pDeferredOperation ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns the created DeferredOperationKHR handle
// (wrapped per the configured ResultValueType policy).  resultCheck throws /
// reports on any non-success result before the value is returned.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type
  Device::createDeferredOperationKHR( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  VkResult result = d.vkCreateDeferredOperationKHR(
    m_device,
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deferredOperation );
}
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: wraps the created handle in a UniqueHandle whose
// deleter destroys it with the same allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type
  Device::createDeferredOperationKHRUnique( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  VkResult result = d.vkCreateDeferredOperationKHR(
    m_device,
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( deferredOperation, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Destroys a deferred-operation handle (vkDestroyDeferredOperationKHR).
// pAllocator may be null.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                            const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDeferredOperationKHR(
    m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload taking an Optional allocator wrapper instead of a
// raw pointer.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                            Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDeferredOperationKHR(
    m_device,
    static_cast<VkDeferredOperationKHR>( operation ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy() overload for DeferredOperationKHR — identical behavior to
// destroyDeferredOperationKHR, provided so handle types can be destroyed
// uniformly.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDeferredOperationKHR(
    m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode generic destroy() overload with an Optional allocator.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyDeferredOperationKHR(
    m_device,
    static_cast<VkDeferredOperationKHR>( operation ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Returns the maximum number of threads that can usefully join the given
// deferred operation (vkGetDeferredOperationMaxConcurrencyKHR).
template <typename Dispatch>
VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
}
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Queries the current result of a deferred operation
// (vkGetDeferredOperationResultKHR).  The raw Result is returned unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
// Enhanced-mode variant.  Note: no resultCheck is performed here either — the
// result code itself is the payload, so it is returned directly and never
// throws.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Joins the calling thread to a deferred operation
// (vkDeferredOperationJoinKHR).  The raw Result is returned unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
}
#else
// Enhanced-mode variant: eSuccess, eThreadDoneKHR and eThreadIdleKHR are all
// treated as success by resultCheck (they tell the caller whether more work
// remains); any other code is reported as an error.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation,
                                                                                                      Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
               VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR",
               { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
  return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  15879. //=== VK_KHR_pipeline_executable_properties ===
// Enumerates executable properties for a pipeline
// (vkGetPipelineExecutablePropertiesKHR).  *pExecutableCount is in/out: the
// capacity of pProperties on input, the number written (or available, when
// pProperties is null) on output.  Returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR * pPipelineInfo,
                                                                                          uint32_t * pExecutableCount,
                                                                                          VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR * pProperties,
                                                                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                                      reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ),
                                                                      pExecutableCount,
                                                                      reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns all executable properties as a vector using
// the standard two-call enumeration pattern — query the count, resize, fetch —
// looping while the driver reports VK_INCOMPLETE (the count may change between
// calls).  The vector is trimmed if fewer elements were written than reserved.
template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
  Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
  uint32_t executableCount;
  VkResult result;
  do
  {
    result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
    if ( ( result == VK_SUCCESS ) && executableCount )
    {
      properties.resize( executableCount );
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                       reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                       &executableCount,
                                                       reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
  if ( executableCount < properties.size() )
  {
    properties.resize( executableCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
// Same enumeration, but constructing the result vector with a caller-supplied
// allocator instance.
template <typename PipelineExecutablePropertiesKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutablePropertiesKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type
  Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR & pipelineInfo,
                                              PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator,
                                              Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties(
    pipelineExecutablePropertiesKHRAllocator );
  uint32_t executableCount;
  VkResult result;
  do
  {
    result = d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr );
    if ( ( result == VK_SUCCESS ) && executableCount )
    {
      properties.resize( executableCount );
      result = d.vkGetPipelineExecutablePropertiesKHR( m_device,
                                                       reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ),
                                                       &executableCount,
                                                       reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutablePropertiesKHR" );
  VULKAN_HPP_ASSERT( executableCount <= properties.size() );
  if ( executableCount < properties.size() )
  {
    properties.resize( executableCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Enumerates statistics of a pipeline executable
// (vkGetPipelineExecutableStatisticsKHR).  *pStatisticCount is in/out as in
// the other enumeration entry points.  Returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                              uint32_t * pStatisticCount,
                                              VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR * pStatistics,
                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                                      reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                                                      pStatisticCount,
                                                                      reinterpret_cast<VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns all statistics as a vector via the two-call
// enumeration pattern, looping while the driver reports VK_INCOMPLETE and
// trimming the vector to the count actually written.
template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
  uint32_t statisticCount;
  VkResult result;
  do
  {
    result =
      d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
    if ( ( result == VK_SUCCESS ) && statisticCount )
    {
      statistics.resize( statisticCount );
      result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                       reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                       &statisticCount,
                                                       reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
  if ( statisticCount < statistics.size() )
  {
    statistics.resize( statisticCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
}
// Same enumeration, but constructing the result vector with a caller-supplied
// allocator instance.
template <typename PipelineExecutableStatisticKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableStatisticKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type
  Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
                                              PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator,
                                              Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics(
    pipelineExecutableStatisticKHRAllocator );
  uint32_t statisticCount;
  VkResult result;
  do
  {
    result =
      d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr );
    if ( ( result == VK_SUCCESS ) && statisticCount )
    {
      statistics.resize( statisticCount );
      result = d.vkGetPipelineExecutableStatisticsKHR( m_device,
                                                       reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
                                                       &statisticCount,
                                                       reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableStatisticsKHR" );
  VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
  if ( statisticCount < statistics.size() )
  {
    statistics.resize( statisticCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), statistics );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Enumerates internal representations of a pipeline executable
// (vkGetPipelineExecutableInternalRepresentationsKHR).
// *pInternalRepresentationCount is in/out as in the other enumeration entry
// points.  Returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR * pExecutableInfo,
                                                           uint32_t * pInternalRepresentationCount,
                                                           VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR * pInternalRepresentations,
                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device,
                                                         reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ),
                                                         pInternalRepresentationCount,
                                                         reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced-mode overload: returns all internal representations as a vector via
// the two-call enumeration pattern, looping while the driver reports
// VK_INCOMPLETE and trimming the vector to the count actually written.
template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
  Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
    internalRepresentations;
  uint32_t internalRepresentationCount;
  VkResult result;
  do
  {
    result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
      m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
    if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
    {
      internalRepresentations.resize( internalRepresentationCount );
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
  if ( internalRepresentationCount < internalRepresentations.size() )
  {
    internalRepresentations.resize( internalRepresentationCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
}
// Same enumeration, but constructing the result vector with a caller-supplied
// allocator instance.
template <typename PipelineExecutableInternalRepresentationKHRAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type
  Device::getPipelineExecutableInternalRepresentationsKHR(
    const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR & executableInfo,
    PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator,
    Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>
    internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
  uint32_t internalRepresentationCount;
  VkResult result;
  do
  {
    result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
      m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr );
    if ( ( result == VK_SUCCESS ) && internalRepresentationCount )
    {
      internalRepresentations.resize( internalRepresentationCount );
      result = d.vkGetPipelineExecutableInternalRepresentationsKHR(
        m_device,
        reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ),
        &internalRepresentationCount,
        reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineExecutableInternalRepresentationsKHR" );
  VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
  if ( internalRepresentationCount < internalRepresentations.size() )
  {
    internalRepresentations.resize( internalRepresentationCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), internalRepresentations );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16125. //=== VK_EXT_host_image_copy ===
// Pointer-parameter overload: forwards the info struct to vkCopyMemoryToImageEXT via the
// dispatcher and returns the raw Result unchecked; the caller inspects it.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT * pCopyMemoryToImageInfo,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher must have been initialized against the same Vulkan header version as this code.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( pCopyMemoryToImageInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Reference-parameter ("enhanced") overload: calls vkCopyMemoryToImageEXT and routes the
// VkResult through resultCheck before converting it with createResultValueType.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::copyMemoryToImageEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT & copyMemoryToImageInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkCopyMemoryToImageEXT( m_device, reinterpret_cast<const VkCopyMemoryToImageInfoEXT *>( &copyMemoryToImageInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToImageEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: forwards to vkCopyImageToMemoryEXT; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT * pCopyImageToMemoryInfo,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( pCopyImageToMemoryInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes a reference and passes the VkResult through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::copyImageToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT & copyImageToMemoryInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkCopyImageToMemoryEXT( m_device, reinterpret_cast<const VkCopyImageToMemoryInfoEXT *>( &copyImageToMemoryInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToMemoryEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: forwards to vkCopyImageToImageEXT; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT * pCopyImageToImageInfo,
                                                                           Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( pCopyImageToImageInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: takes a reference and passes the VkResult through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::copyImageToImageEXT( const VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT & copyImageToImageInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkCopyImageToImageEXT( m_device, reinterpret_cast<const VkCopyImageToImageInfoEXT *>( &copyImageToImageInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::copyImageToImageEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: forwards an explicit count + array of layout-transition infos
// to vkTransitionImageLayoutEXT; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::transitionImageLayoutEXT( uint32_t                                                       transitionCount,
                                                                                const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT * pTransitions,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>(
    d.vkTransitionImageLayoutEXT( m_device, transitionCount, reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( pTransitions ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: accepts any contiguous range of transitions via ArrayProxy
// (count/data derived from the proxy) and routes the VkResult through resultCheck.
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::transitionImageLayoutEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT> const & transitions,
                                    Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result =
    d.vkTransitionImageLayoutEXT( m_device, transitions.size(), reinterpret_cast<const VkHostImageLayoutTransitionInfoEXT *>( transitions.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::transitionImageLayoutEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: queries the subresource layout into *pLayout; no result code
// (the underlying entry point returns void).
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2EXT( VULKAN_HPP_NAMESPACE::Image                        image,
                                                              const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
                                                              VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR *      pLayout,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetImageSubresourceLayout2EXT( m_device,
                                     static_cast<VkImage>( image ),
                                     reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
                                     reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the queried SubresourceLayout2KHR by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2EXT(
  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
  d.vkGetImageSubresourceLayout2EXT( m_device,
                                     static_cast<VkImage>( image ),
                                     reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                     reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
  return layout;
}

// StructureChain overload: fills the SubresourceLayout2KHR element of a caller-specified
// structure chain, so extension structs chained behind it get populated in the same call.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2EXT(
  VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  // Write directly into the chain's SubresourceLayout2KHR link.
  VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
  d.vkGetImageSubresourceLayout2EXT( m_device,
                                     static_cast<VkImage>( image ),
                                     reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                     reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
  return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16241. //=== VK_KHR_map_memory2 ===
// Pointer-parameter overload: maps memory per *pMemoryMapInfo, writing the host pointer to
// *ppData; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR * pMemoryMapInfo,
                                                                     void **                                        ppData,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( pMemoryMapInfo ), ppData ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: checks the VkResult via resultCheck and returns the mapped pointer.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type
  Device::mapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR & memoryMapInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  void *   pData;
  VkResult result = d.vkMapMemory2KHR( m_device, reinterpret_cast<const VkMemoryMapInfoKHR *>( &memoryMapInfo ), &pData );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory2KHR" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: forwards to vkUnmapMemory2KHR and returns its raw Result.
template <typename Dispatch>
VULKAN_HPP_INLINE Result Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR * pMemoryUnmapInfo,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( pMemoryUnmapInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns void — the VkResult from vkUnmapMemory2KHR is discarded in this
// overload (no resultCheck), unlike the other enhanced wrappers in this section.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::unmapMemory2KHR( const VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR & memoryUnmapInfo,
                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkUnmapMemory2KHR( m_device, reinterpret_cast<const VkMemoryUnmapInfoKHR *>( &memoryUnmapInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16278. //=== VK_EXT_swapchain_maintenance1 ===
// Pointer-parameter overload: forwards to vkReleaseSwapchainImagesEXT; returns the raw Result.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT * pReleaseInfo,
                                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( pReleaseInfo ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: routes the VkResult through resultCheck.
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type
  Device::releaseSwapchainImagesEXT( const VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT & releaseInfo, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkReleaseSwapchainImagesEXT( m_device, reinterpret_cast<const VkReleaseSwapchainImagesInfoEXT *>( &releaseInfo ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::releaseSwapchainImagesEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16297. //=== VK_NV_device_generated_commands ===
// Pointer-parameter overload: queries memory requirements for generated commands into
// *pMemoryRequirements; the underlying entry point returns void.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV * pInfo,
                                                                         VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ),
                                                reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: returns the MemoryRequirements2 by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return memoryRequirements;
}

// StructureChain overload: fills the MemoryRequirements2 element of a caller-specified chain,
// allowing chained extension structs to be populated in the same query.
template <typename X, typename Y, typename... Z, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV & info,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  // Write directly into the chain's MemoryRequirements2 link.
  VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device,
                                                reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ),
                                                reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  return structureChain;
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: records a preprocess-generated-commands command into this
// command buffer; void return, no result to check.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same call with a reference parameter.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: records execution of generated commands; isPreprocessed tells
// the implementation whether a separate preprocess step was already recorded.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
                                                                  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV * pGeneratedCommandsInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdExecuteGeneratedCommandsNV(
    m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same call with a reference parameter.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32                          isPreprocessed,
                                                                  const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV & generatedCommandsInfo,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdExecuteGeneratedCommandsNV(
    m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Binds a specific shader group of a pipeline to this command buffer; all parameters are
// plain values, so there is only this one overload.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
                                                                 VULKAN_HPP_NAMESPACE::Pipeline          pipeline,
                                                                 uint32_t                                groupIndex,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
}
// Pointer-parameter overload: creates an indirect-commands layout into *pIndirectCommandsLayout
// using optional host allocation callbacks; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV * pCreateInfo,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks *                pAllocator,
                                          VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV *                 pIndirectCommandsLayout,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device,
                                                                  reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ),
                                                                  reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                  reinterpret_cast<VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: allocator is an Optional (may be a nullptr when omitted); the VkResult
// goes through resultCheck and the created handle is returned by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type
  Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                          Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
  VkResult                                       result = d.vkCreateIndirectCommandsLayoutNV(
    m_device,
    reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), indirectCommandsLayout );
}

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: wraps the created layout in a UniqueHandle whose deleter
// (ObjectDestroy over this Device) destroys it with the same allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type
  Device::createIndirectCommandsLayoutNVUnique( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV & createInfo,
                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>        allocator,
                                                Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
  VkResult                                       result = d.vkCreateIndirectCommandsLayoutNV(
    m_device,
    reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>(
                                  indirectCommandsLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-parameter overload: destroys the layout using optional host allocation callbacks.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyIndirectCommandsLayoutNV(
    m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: allocator passed as an Optional, converted back to a raw pointer for the C call.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV            indirectCommandsLayout,
                                                                Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyIndirectCommandsLayoutNV(
    m_device,
    static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Generic destroy() overload for IndirectCommandsLayoutNV (used by ObjectDestroy/UniqueHandle);
// identical behavior to destroyIndirectCommandsLayoutNV above.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV    indirectCommandsLayout,
                                        const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyIndirectCommandsLayoutNV(
    m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: allocator passed as an Optional.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV            indirectCommandsLayout,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkDestroyIndirectCommandsLayoutNV(
    m_device,
    static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16475. //=== VK_EXT_depth_bias_control ===
// Pointer-parameter overload: records a depth-bias state update into this command buffer.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT * pDepthBiasInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( pDepthBiasInfo ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: same call with a reference parameter.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBias2EXT( const VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT & depthBiasInfo,
                                                        Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  d.vkCmdSetDepthBias2EXT( m_commandBuffer, reinterpret_cast<const VkDepthBiasInfoEXT *>( &depthBiasInfo ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16492. //=== VK_EXT_acquire_drm_display ===
// acquireDrmDisplayEXT has no output parameters, so exactly one overload exists and the
// preprocessor selects which: the raw-Result form when enhanced mode is disabled, otherwise
// the result-checked form.
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireDrmDisplayEXT( int32_t                          drmFd,
                                                                                    VULKAN_HPP_NAMESPACE::DisplayKHR display,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) ) );
}
#else
// Enhanced form: routes the VkResult through resultCheck.
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type
  PhysicalDevice::acquireDrmDisplayEXT( int32_t drmFd, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VkResult result = d.vkAcquireDrmDisplayEXT( m_physicalDevice, drmFd, static_cast<VkDisplayKHR>( display ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireDrmDisplayEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
// Pointer-parameter overload: looks up the DisplayKHR for a DRM fd/connector into *display;
// returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDrmDisplayEXT( int32_t                            drmFd,
                                                                                uint32_t                           connectorId,
                                                                                VULKAN_HPP_NAMESPACE::DisplayKHR * display,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( display ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: checks the VkResult and returns the DisplayKHR handle by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
  PhysicalDevice::getDrmDisplayEXT( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DisplayKHR display;
  VkResult                         result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
}

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: the deleter is ObjectRelease (displays are released, not destroyed).
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
  PhysicalDevice::getDrmDisplayEXTUnique( int32_t drmFd, uint32_t connectorId, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::DisplayKHR display;
  VkResult                         result = d.vkGetDrmDisplayEXT( m_physicalDevice, drmFd, connectorId, reinterpret_cast<VkDisplayKHR *>( &display ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDrmDisplayEXTUnique" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                                UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
}
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16547. //=== VK_EXT_private_data ===
// Pointer-parameter overload: creates a private-data slot into *pPrivateDataSlot using
// optional host allocation callbacks; returns the raw Result unchecked.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo,
                                                                                const VULKAN_HPP_NAMESPACE::AllocationCallbacks *       pAllocator,
                                                                                VULKAN_HPP_NAMESPACE::PrivateDataSlot *                 pPrivateDataSlot,
                                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
  // Dispatcher/header version consistency check.
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device,
                                                            reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ),
                                                            reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                            reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) );
}

#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enhanced overload: checks the VkResult and returns the created PrivateDataSlot by value.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type
  Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &   createInfo,
                                    Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                    Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
  VkResult                              result = d.vkCreatePrivateDataSlotEXT(
    m_device,
    reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot );
}

#  ifndef VULKAN_HPP_NO_SMART_HANDLE
// Unique-handle variant: the slot is wrapped in a UniqueHandle whose ObjectDestroy deleter
// destroys it with the same allocator and dispatcher.
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type
  Device::createPrivateDataSlotEXTUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo &   createInfo,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot;
  VkResult                              result = d.vkCreatePrivateDataSlotEXT(
    m_device,
    reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ),
    reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
    reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique" );
  return createResultValueType(
    static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
    UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
}
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
#endif   /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16598. template <typename Dispatch>
  16599. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16600. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  16601. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16602. {
  16603. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16604. d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  16605. }
  16606. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16607. template <typename Dispatch>
  16608. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16609. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  16610. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16611. {
  16612. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16613. d.vkDestroyPrivateDataSlotEXT(
  16614. m_device,
  16615. static_cast<VkPrivateDataSlot>( privateDataSlot ),
  16616. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  16617. }
  16618. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16619. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16620. template <typename Dispatch>
  16621. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  16622. uint64_t objectHandle,
  16623. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16624. uint64_t data,
  16625. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16626. {
  16627. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16628. return static_cast<Result>(
  16629. d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) );
  16630. }
  16631. #else
  16632. template <typename Dispatch>
  16633. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  16634. uint64_t objectHandle,
  16635. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16636. uint64_t data,
  16637. Dispatch const & d ) const
  16638. {
  16639. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16640. VkResult result =
  16641. d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data );
  16642. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
  16643. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  16644. }
  16645. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  16646. template <typename Dispatch>
  16647. VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  16648. uint64_t objectHandle,
  16649. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16650. uint64_t * pData,
  16651. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16652. {
  16653. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16654. d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData );
  16655. }
  16656. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16657. template <typename Dispatch>
  16658. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_,
  16659. uint64_t objectHandle,
  16660. VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot,
  16661. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16662. {
  16663. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16664. uint64_t data;
  16665. d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType_ ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data );
  16666. return data;
  16667. }
  16668. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16669. #if defined( VK_ENABLE_BETA_EXTENSIONS )
  16670. //=== VK_KHR_video_encode_queue ===
  16671. template <typename Dispatch>
  16672. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  16673. PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR * pQualityLevelInfo,
  16674. VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR * pQualityLevelProperties,
  16675. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16676. {
  16677. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16678. return static_cast<Result>(
  16679. d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
  16680. reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( pQualityLevelInfo ),
  16681. reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( pQualityLevelProperties ) ) );
  16682. }
  16683. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16684. template <typename Dispatch>
  16685. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::type
  16686. PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
  16687. Dispatch const & d ) const
  16688. {
  16689. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16690. VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR qualityLevelProperties;
  16691. VkResult result =
  16692. d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
  16693. reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
  16694. reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) );
  16695. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
  16696. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), qualityLevelProperties );
  16697. }
  16698. template <typename X, typename Y, typename... Z, typename Dispatch>
  16699. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
  16700. PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR & qualityLevelInfo,
  16701. Dispatch const & d ) const
  16702. {
  16703. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16704. StructureChain<X, Y, Z...> structureChain;
  16705. VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR & qualityLevelProperties =
  16706. structureChain.template get<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>();
  16707. VkResult result =
  16708. d.vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( m_physicalDevice,
  16709. reinterpret_cast<const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR *>( &qualityLevelInfo ),
  16710. reinterpret_cast<VkVideoEncodeQualityLevelPropertiesKHR *>( &qualityLevelProperties ) );
  16711. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoEncodeQualityLevelPropertiesKHR" );
  16712. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  16713. }
  16714. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16715. template <typename Dispatch>
  16716. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  16717. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR * pVideoSessionParametersInfo,
  16718. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR * pFeedbackInfo,
  16719. size_t * pDataSize,
  16720. void * pData,
  16721. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16722. {
  16723. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16724. return static_cast<Result>(
  16725. d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16726. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( pVideoSessionParametersInfo ),
  16727. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( pFeedbackInfo ),
  16728. pDataSize,
  16729. pData ) );
  16730. }
  16731. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16732. template <typename Uint8_tAllocator, typename Dispatch>
  16733. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  16734. typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
  16735. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
  16736. Dispatch const & d ) const
  16737. {
  16738. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16739. std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_;
  16740. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
  16741. std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
  16742. size_t dataSize;
  16743. VkResult result;
  16744. do
  16745. {
  16746. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16747. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16748. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16749. &dataSize,
  16750. nullptr );
  16751. if ( ( result == VK_SUCCESS ) && dataSize )
  16752. {
  16753. data.resize( dataSize );
  16754. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16755. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16756. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16757. &dataSize,
  16758. reinterpret_cast<void *>( data.data() ) );
  16759. }
  16760. } while ( result == VK_INCOMPLETE );
  16761. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  16762. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  16763. }
  16764. template <typename Uint8_tAllocator,
  16765. typename Dispatch,
  16766. typename B2,
  16767. typename std::enable_if<std::is_same<typename B2::value_type, uint8_t>::value, int>::type>
  16768. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  16769. typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>>>::type
  16770. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
  16771. Uint8_tAllocator & uint8_tAllocator,
  16772. Dispatch const & d ) const
  16773. {
  16774. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16775. std::pair<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR, std::vector<uint8_t, Uint8_tAllocator>> data_(
  16776. std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
  16777. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo = data_.first;
  16778. std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
  16779. size_t dataSize;
  16780. VkResult result;
  16781. do
  16782. {
  16783. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16784. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16785. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16786. &dataSize,
  16787. nullptr );
  16788. if ( ( result == VK_SUCCESS ) && dataSize )
  16789. {
  16790. data.resize( dataSize );
  16791. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16792. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16793. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16794. &dataSize,
  16795. reinterpret_cast<void *>( data.data() ) );
  16796. }
  16797. } while ( result == VK_INCOMPLETE );
  16798. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  16799. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  16800. }
  16801. template <typename X, typename Y, typename... Z, typename Uint8_tAllocator, typename Dispatch>
  16802. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  16803. typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
  16804. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
  16805. Dispatch const & d ) const
  16806. {
  16807. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16808. std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_;
  16809. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
  16810. data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
  16811. std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
  16812. size_t dataSize;
  16813. VkResult result;
  16814. do
  16815. {
  16816. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16817. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16818. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16819. &dataSize,
  16820. nullptr );
  16821. if ( ( result == VK_SUCCESS ) && dataSize )
  16822. {
  16823. data.resize( dataSize );
  16824. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16825. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16826. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16827. &dataSize,
  16828. reinterpret_cast<void *>( data.data() ) );
  16829. }
  16830. } while ( result == VK_INCOMPLETE );
  16831. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  16832. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  16833. }
  16834. template <typename X,
  16835. typename Y,
  16836. typename... Z,
  16837. typename Uint8_tAllocator,
  16838. typename Dispatch,
  16839. typename B2,
  16840. typename std::enable_if<std::is_same<typename B2::value_type, uint8_t>::value, int>::type>
  16841. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  16842. typename ResultValueType<std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>>>::type
  16843. Device::getEncodedVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR & videoSessionParametersInfo,
  16844. Uint8_tAllocator & uint8_tAllocator,
  16845. Dispatch const & d ) const
  16846. {
  16847. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16848. std::pair<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>, std::vector<uint8_t, Uint8_tAllocator>> data_(
  16849. std::piecewise_construct, std::forward_as_tuple(), std::forward_as_tuple( uint8_tAllocator ) );
  16850. VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR & feedbackInfo =
  16851. data_.first.template get<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>();
  16852. std::vector<uint8_t, Uint8_tAllocator> & data = data_.second;
  16853. size_t dataSize;
  16854. VkResult result;
  16855. do
  16856. {
  16857. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16858. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16859. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16860. &dataSize,
  16861. nullptr );
  16862. if ( ( result == VK_SUCCESS ) && dataSize )
  16863. {
  16864. data.resize( dataSize );
  16865. result = d.vkGetEncodedVideoSessionParametersKHR( m_device,
  16866. reinterpret_cast<const VkVideoEncodeSessionParametersGetInfoKHR *>( &videoSessionParametersInfo ),
  16867. reinterpret_cast<VkVideoEncodeSessionParametersFeedbackInfoKHR *>( &feedbackInfo ),
  16868. &dataSize,
  16869. reinterpret_cast<void *>( data.data() ) );
  16870. }
  16871. } while ( result == VK_INCOMPLETE );
  16872. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEncodedVideoSessionParametersKHR" );
  16873. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  16874. }
  16875. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16876. template <typename Dispatch>
  16877. VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR * pEncodeInfo,
  16878. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16879. {
  16880. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16881. d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( pEncodeInfo ) );
  16882. }
  16883. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16884. template <typename Dispatch>
  16885. VULKAN_HPP_INLINE void CommandBuffer::encodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR & encodeInfo,
  16886. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16887. {
  16888. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16889. d.vkCmdEncodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoEncodeInfoKHR *>( &encodeInfo ) );
  16890. }
  16891. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16892. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  16893. #if defined( VK_USE_PLATFORM_METAL_EXT )
  16894. //=== VK_EXT_metal_objects ===
  16895. template <typename Dispatch>
  16896. VULKAN_HPP_INLINE void Device::exportMetalObjectsEXT( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT * pMetalObjectsInfo,
  16897. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16898. {
  16899. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16900. d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( pMetalObjectsInfo ) );
  16901. }
  16902. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16903. template <typename Dispatch>
  16904. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT
  16905. Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16906. {
  16907. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16908. VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT metalObjectsInfo;
  16909. d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
  16910. return metalObjectsInfo;
  16911. }
  16912. template <typename X, typename Y, typename... Z, typename Dispatch>
  16913. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  16914. Device::exportMetalObjectsEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16915. {
  16916. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16917. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  16918. VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT & metalObjectsInfo = structureChain.template get<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>();
  16919. d.vkExportMetalObjectsEXT( m_device, reinterpret_cast<VkExportMetalObjectsInfoEXT *>( &metalObjectsInfo ) );
  16920. return structureChain;
  16921. }
  16922. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16923. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  16924. //=== VK_KHR_synchronization2 ===
  16925. template <typename Dispatch>
  16926. VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
  16927. const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
  16928. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16929. {
  16930. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16931. d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  16932. }
  16933. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16934. template <typename Dispatch>
  16935. VULKAN_HPP_INLINE void CommandBuffer::setEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
  16936. const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
  16937. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16938. {
  16939. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16940. d.vkCmdSetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  16941. }
  16942. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16943. template <typename Dispatch>
  16944. VULKAN_HPP_INLINE void CommandBuffer::resetEvent2KHR( VULKAN_HPP_NAMESPACE::Event event,
  16945. VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask,
  16946. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16947. {
  16948. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16949. d.vkCmdResetEvent2KHR( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) );
  16950. }
  16951. template <typename Dispatch>
  16952. VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( uint32_t eventCount,
  16953. const VULKAN_HPP_NAMESPACE::Event * pEvents,
  16954. const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos,
  16955. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16956. {
  16957. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16958. d.vkCmdWaitEvents2KHR(
  16959. m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) );
  16960. }
  16961. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16962. template <typename Dispatch>
  16963. VULKAN_HPP_INLINE void CommandBuffer::waitEvents2KHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events,
  16964. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos,
  16965. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  16966. {
  16967. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16968. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  16969. VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() );
  16970. # else
  16971. if ( events.size() != dependencyInfos.size() )
  16972. {
  16973. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2KHR: events.size() != dependencyInfos.size()" );
  16974. }
  16975. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  16976. d.vkCmdWaitEvents2KHR( m_commandBuffer,
  16977. events.size(),
  16978. reinterpret_cast<const VkEvent *>( events.data() ),
  16979. reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) );
  16980. }
  16981. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16982. template <typename Dispatch>
  16983. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo,
  16984. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16985. {
  16986. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16987. d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) );
  16988. }
  16989. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  16990. template <typename Dispatch>
  16991. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2KHR( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo,
  16992. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  16993. {
  16994. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  16995. d.vkCmdPipelineBarrier2KHR( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) );
  16996. }
  16997. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  16998. template <typename Dispatch>
  16999. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2KHR( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  17000. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  17001. uint32_t query,
  17002. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17003. {
  17004. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17005. d.vkCmdWriteTimestamp2KHR( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query );
  17006. }
  17007. template <typename Dispatch>
  17008. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2KHR( uint32_t submitCount,
  17009. const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits,
  17010. VULKAN_HPP_NAMESPACE::Fence fence,
  17011. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17012. {
  17013. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17014. return static_cast<Result>(
  17015. d.vkQueueSubmit2KHR( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  17016. }
  17017. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17018. template <typename Dispatch>
  17019. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2KHR(
  17020. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  17021. {
  17022. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17023. VkResult result = d.vkQueueSubmit2KHR( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) );
  17024. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2KHR" );
  17025. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  17026. }
  17027. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17028. template <typename Dispatch>
  17029. VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarker2AMD( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage,
  17030. VULKAN_HPP_NAMESPACE::Buffer dstBuffer,
  17031. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset,
  17032. uint32_t marker,
  17033. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17034. {
  17035. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17036. d.vkCmdWriteBufferMarker2AMD(
  17037. m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  17038. }
  17039. template <typename Dispatch>
  17040. VULKAN_HPP_INLINE void Queue::getCheckpointData2NV( uint32_t * pCheckpointDataCount,
  17041. VULKAN_HPP_NAMESPACE::CheckpointData2NV * pCheckpointData,
  17042. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17043. {
  17044. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17045. d.vkGetQueueCheckpointData2NV( m_queue, pCheckpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( pCheckpointData ) );
  17046. }
  17047. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17048. template <typename CheckpointData2NVAllocator, typename Dispatch>
  17049. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
  17050. Queue::getCheckpointData2NV( Dispatch const & d ) const
  17051. {
  17052. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17053. std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData;
  17054. uint32_t checkpointDataCount;
  17055. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
  17056. checkpointData.resize( checkpointDataCount );
  17057. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
  17058. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  17059. if ( checkpointDataCount < checkpointData.size() )
  17060. {
  17061. checkpointData.resize( checkpointDataCount );
  17062. }
  17063. return checkpointData;
  17064. }
  17065. template <typename CheckpointData2NVAllocator,
  17066. typename Dispatch,
  17067. typename B1,
  17068. typename std::enable_if<std::is_same<typename B1::value_type, CheckpointData2NV>::value, int>::type>
  17069. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator>
  17070. Queue::getCheckpointData2NV( CheckpointData2NVAllocator & checkpointData2NVAllocator, Dispatch const & d ) const
  17071. {
  17072. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17073. std::vector<VULKAN_HPP_NAMESPACE::CheckpointData2NV, CheckpointData2NVAllocator> checkpointData( checkpointData2NVAllocator );
  17074. uint32_t checkpointDataCount;
  17075. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, nullptr );
  17076. checkpointData.resize( checkpointDataCount );
  17077. d.vkGetQueueCheckpointData2NV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointData2NV *>( checkpointData.data() ) );
  17078. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  17079. if ( checkpointDataCount < checkpointData.size() )
  17080. {
  17081. checkpointData.resize( checkpointDataCount );
  17082. }
  17083. return checkpointData;
  17084. }
  17085. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17086. //=== VK_EXT_descriptor_buffer ===
  17087. template <typename Dispatch>
  17088. VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
  17089. VULKAN_HPP_NAMESPACE::DeviceSize * pLayoutSizeInBytes,
  17090. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17091. {
  17092. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17093. d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( pLayoutSizeInBytes ) );
  17094. }
  17095. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17096. template <typename Dispatch>
  17097. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize
  17098. Device::getDescriptorSetLayoutSizeEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17099. {
  17100. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17101. VULKAN_HPP_NAMESPACE::DeviceSize layoutSizeInBytes;
  17102. d.vkGetDescriptorSetLayoutSizeEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), reinterpret_cast<VkDeviceSize *>( &layoutSizeInBytes ) );
  17103. return layoutSizeInBytes;
  17104. }
  17105. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17106. template <typename Dispatch>
  17107. VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutBindingOffsetEXT( VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout,
  17108. uint32_t binding,
  17109. VULKAN_HPP_NAMESPACE::DeviceSize * pOffset,
  17110. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17111. {
  17112. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17113. d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( pOffset ) );
  17114. }
  17115. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17116. template <typename Dispatch>
  17117. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getDescriptorSetLayoutBindingOffsetEXT(
  17118. VULKAN_HPP_NAMESPACE::DescriptorSetLayout layout, uint32_t binding, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17119. {
  17120. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17121. VULKAN_HPP_NAMESPACE::DeviceSize offset;
  17122. d.vkGetDescriptorSetLayoutBindingOffsetEXT( m_device, static_cast<VkDescriptorSetLayout>( layout ), binding, reinterpret_cast<VkDeviceSize *>( &offset ) );
  17123. return offset;
  17124. }
  17125. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17126. template <typename Dispatch>
  17127. VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT * pDescriptorInfo,
  17128. size_t dataSize,
  17129. void * pDescriptor,
  17130. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17131. {
  17132. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17133. d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( pDescriptorInfo ), dataSize, pDescriptor );
  17134. }
  17135. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17136. template <typename Dispatch>
  17137. VULKAN_HPP_INLINE void Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
  17138. size_t dataSize,
  17139. void * pDescriptor,
  17140. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17141. {
  17142. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17143. d.vkGetDescriptorEXT( m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), dataSize, pDescriptor );
  17144. }
  17145. template <typename DescriptorType, typename Dispatch>
  17146. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE DescriptorType Device::getDescriptorEXT( const VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT & descriptorInfo,
  17147. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17148. {
  17149. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17150. DescriptorType descriptor;
  17151. d.vkGetDescriptorEXT(
  17152. m_device, reinterpret_cast<const VkDescriptorGetInfoEXT *>( &descriptorInfo ), sizeof( DescriptorType ), reinterpret_cast<void *>( &descriptor ) );
  17153. return descriptor;
  17154. }
  17155. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17156. template <typename Dispatch>
  17157. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBuffersEXT( uint32_t bufferCount,
  17158. const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT * pBindingInfos,
  17159. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17160. {
  17161. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17162. d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bufferCount, reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( pBindingInfos ) );
  17163. }
  17164. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17165. template <typename Dispatch>
  17166. VULKAN_HPP_INLINE void
  17167. CommandBuffer::bindDescriptorBuffersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT> const & bindingInfos,
  17168. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17169. {
  17170. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17171. d.vkCmdBindDescriptorBuffersEXT( m_commandBuffer, bindingInfos.size(), reinterpret_cast<const VkDescriptorBufferBindingInfoEXT *>( bindingInfos.data() ) );
  17172. }
  17173. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17174. template <typename Dispatch>
  17175. VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  17176. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  17177. uint32_t firstSet,
  17178. uint32_t setCount,
  17179. const uint32_t * pBufferIndices,
  17180. const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets,
  17181. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17182. {
  17183. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17184. d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
  17185. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  17186. static_cast<VkPipelineLayout>( layout ),
  17187. firstSet,
  17188. setCount,
  17189. pBufferIndices,
  17190. reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  17191. }
  17192. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17193. template <typename Dispatch>
  17194. VULKAN_HPP_INLINE void CommandBuffer::setDescriptorBufferOffsetsEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  17195. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  17196. uint32_t firstSet,
  17197. VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & bufferIndices,
  17198. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets,
  17199. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  17200. {
  17201. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17202. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  17203. VULKAN_HPP_ASSERT( bufferIndices.size() == offsets.size() );
  17204. # else
  17205. if ( bufferIndices.size() != offsets.size() )
  17206. {
  17207. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setDescriptorBufferOffsetsEXT: bufferIndices.size() != offsets.size()" );
  17208. }
  17209. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  17210. d.vkCmdSetDescriptorBufferOffsetsEXT( m_commandBuffer,
  17211. static_cast<VkPipelineBindPoint>( pipelineBindPoint ),
  17212. static_cast<VkPipelineLayout>( layout ),
  17213. firstSet,
  17214. bufferIndices.size(),
  17215. bufferIndices.data(),
  17216. reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  17217. }
  17218. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17219. template <typename Dispatch>
  17220. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorBufferEmbeddedSamplersEXT( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
  17221. VULKAN_HPP_NAMESPACE::PipelineLayout layout,
  17222. uint32_t set,
  17223. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17224. {
  17225. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17226. d.vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
  17227. m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set );
  17228. }
  17229. template <typename Dispatch>
  17230. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getBufferOpaqueCaptureDescriptorDataEXT(
  17231. const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17232. {
  17233. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17234. return static_cast<Result>(
  17235. d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
  17236. }
  17237. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17238. template <typename DataType, typename Dispatch>
  17239. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  17240. Device::getBufferOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
  17241. {
  17242. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17243. DataType data;
  17244. VkResult result = d.vkGetBufferOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkBufferCaptureDescriptorDataInfoEXT *>( &info ), &data );
  17245. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferOpaqueCaptureDescriptorDataEXT" );
  17246. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  17247. }
  17248. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17249. template <typename Dispatch>
  17250. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageOpaqueCaptureDescriptorDataEXT(
  17251. const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17252. {
  17253. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17254. return static_cast<Result>(
  17255. d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
  17256. }
  17257. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17258. template <typename DataType, typename Dispatch>
  17259. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  17260. Device::getImageOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
  17261. {
  17262. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17263. DataType data;
  17264. VkResult result = d.vkGetImageOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageCaptureDescriptorDataInfoEXT *>( &info ), &data );
  17265. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageOpaqueCaptureDescriptorDataEXT" );
  17266. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  17267. }
  17268. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17269. template <typename Dispatch>
  17270. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewOpaqueCaptureDescriptorDataEXT(
  17271. const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17272. {
  17273. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17274. return static_cast<Result>(
  17275. d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
  17276. }
  17277. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17278. template <typename DataType, typename Dispatch>
  17279. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  17280. Device::getImageViewOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
  17281. {
  17282. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17283. DataType data;
  17284. VkResult result =
  17285. d.vkGetImageViewOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkImageViewCaptureDescriptorDataInfoEXT *>( &info ), &data );
  17286. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewOpaqueCaptureDescriptorDataEXT" );
  17287. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  17288. }
  17289. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17290. template <typename Dispatch>
  17291. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSamplerOpaqueCaptureDescriptorDataEXT(
  17292. const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17293. {
  17294. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17295. return static_cast<Result>(
  17296. d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
  17297. }
  17298. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17299. template <typename DataType, typename Dispatch>
  17300. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  17301. Device::getSamplerOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT & info, Dispatch const & d ) const
  17302. {
  17303. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17304. DataType data;
  17305. VkResult result = d.vkGetSamplerOpaqueCaptureDescriptorDataEXT( m_device, reinterpret_cast<const VkSamplerCaptureDescriptorDataInfoEXT *>( &info ), &data );
  17306. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSamplerOpaqueCaptureDescriptorDataEXT" );
  17307. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  17308. }
  17309. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17310. template <typename Dispatch>
  17311. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  17312. const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17313. {
  17314. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17315. return static_cast<Result>( d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  17316. m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( pInfo ), pData ) );
  17317. }
  17318. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17319. template <typename DataType, typename Dispatch>
  17320. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  17321. Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT( const VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT & info,
  17322. Dispatch const & d ) const
  17323. {
  17324. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17325. DataType data;
  17326. VkResult result = d.vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
  17327. m_device, reinterpret_cast<const VkAccelerationStructureCaptureDescriptorDataInfoEXT *>( &info ), &data );
  17328. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  17329. VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureOpaqueCaptureDescriptorDataEXT" );
  17330. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  17331. }
  17332. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17333. //=== VK_NV_fragment_shading_rate_enums ===
  17334. template <typename Dispatch>
  17335. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate,
  17336. const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2],
  17337. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17338. {
  17339. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17340. d.vkCmdSetFragmentShadingRateEnumNV(
  17341. m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  17342. }
  17343. //=== VK_EXT_mesh_shader ===
  // Thin pass-through to vkCmdDrawMeshTasksEXT with the given workgroup counts (VK_EXT_mesh_shader).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::drawMeshTasksEXT( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // The dispatcher must have been initialized against the same Vulkan header version as this code.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdDrawMeshTasksEXT( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  }
  17351. template <typename Dispatch>
  17352. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
  17353. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  17354. uint32_t drawCount,
  17355. uint32_t stride,
  17356. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17357. {
  17358. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17359. d.vkCmdDrawMeshTasksIndirectEXT( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  17360. }
  17361. template <typename Dispatch>
  17362. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountEXT( VULKAN_HPP_NAMESPACE::Buffer buffer,
  17363. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  17364. VULKAN_HPP_NAMESPACE::Buffer countBuffer,
  17365. VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset,
  17366. uint32_t maxDrawCount,
  17367. uint32_t stride,
  17368. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17369. {
  17370. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17371. d.vkCmdDrawMeshTasksIndirectCountEXT( m_commandBuffer,
  17372. static_cast<VkBuffer>( buffer ),
  17373. static_cast<VkDeviceSize>( offset ),
  17374. static_cast<VkBuffer>( countBuffer ),
  17375. static_cast<VkDeviceSize>( countBufferOffset ),
  17376. maxDrawCount,
  17377. stride );
  17378. }
  17379. //=== VK_KHR_copy_commands2 ===
  17380. template <typename Dispatch>
  17381. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo,
  17382. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17383. {
  17384. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17385. d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) );
  17386. }
  17387. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17388. template <typename Dispatch>
  17389. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo,
  17390. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17391. {
  17392. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17393. d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( &copyBufferInfo ) );
  17394. }
  17395. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17396. template <typename Dispatch>
  17397. VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo,
  17398. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17399. {
  17400. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17401. d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) );
  17402. }
  17403. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17404. template <typename Dispatch>
  17405. VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo,
  17406. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17407. {
  17408. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17409. d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( &copyImageInfo ) );
  17410. }
  17411. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17412. template <typename Dispatch>
  17413. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo,
  17414. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17415. {
  17416. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17417. d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) );
  17418. }
  17419. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17420. template <typename Dispatch>
  17421. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo,
  17422. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17423. {
  17424. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17425. d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( &copyBufferToImageInfo ) );
  17426. }
  17427. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17428. template <typename Dispatch>
  17429. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo,
  17430. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17431. {
  17432. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17433. d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) );
  17434. }
  17435. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17436. template <typename Dispatch>
  17437. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo,
  17438. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17439. {
  17440. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17441. d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( &copyImageToBufferInfo ) );
  17442. }
  17443. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17444. template <typename Dispatch>
  17445. VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo,
  17446. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17447. {
  17448. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17449. d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) );
  17450. }
  17451. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17452. template <typename Dispatch>
  17453. VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo,
  17454. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17455. {
  17456. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17457. d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) );
  17458. }
  17459. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17460. template <typename Dispatch>
  17461. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo,
  17462. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17463. {
  17464. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17465. d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) );
  17466. }
  17467. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17468. template <typename Dispatch>
  17469. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo,
  17470. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17471. {
  17472. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17473. d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) );
  17474. }
  17475. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17476. //=== VK_EXT_device_fault ===
  17477. template <typename Dispatch>
  17478. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFaultInfoEXT( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT * pFaultCounts,
  17479. VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT * pFaultInfo,
  17480. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17481. {
  17482. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17483. return static_cast<Result>( d.vkGetDeviceFaultInfoEXT(
  17484. m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( pFaultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( pFaultInfo ) ) );
  17485. }
  17486. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17487. template <typename Dispatch>
  17488. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>
  17489. Device::getFaultInfoEXT( Dispatch const & d ) const
  17490. {
  17491. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17492. std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT> data_;
  17493. VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT & faultCounts = data_.first;
  17494. VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT & faultInfo = data_.second;
  17495. VkResult result =
  17496. d.vkGetDeviceFaultInfoEXT( m_device, reinterpret_cast<VkDeviceFaultCountsEXT *>( &faultCounts ), reinterpret_cast<VkDeviceFaultInfoEXT *>( &faultInfo ) );
  17497. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  17498. VULKAN_HPP_NAMESPACE_STRING "::Device::getFaultInfoEXT",
  17499. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
  17500. return ResultValue<std::pair<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT, VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>>(
  17501. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data_ );
  17502. }
  17503. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17504. #if defined( VK_USE_PLATFORM_WIN32_KHR )
  17505. //=== VK_NV_acquire_winrt_display ===
  17506. # ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17507. template <typename Dispatch>
  17508. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display,
  17509. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17510. {
  17511. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17512. return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  17513. }
  17514. # else
  17515. template <typename Dispatch>
  17516. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
  17517. PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  17518. {
  17519. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17520. VkResult result = d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) );
  17521. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
  17522. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  17523. }
  17524. # endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  17525. template <typename Dispatch>
  17526. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId,
  17527. VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay,
  17528. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17529. {
  17530. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17531. return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) );
  17532. }
  17533. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17534. template <typename Dispatch>
  17535. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type
  17536. PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
  17537. {
  17538. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17539. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  17540. VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
  17541. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
  17542. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display );
  17543. }
  17544. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  17545. template <typename Dispatch>
  17546. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type
  17547. PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
  17548. {
  17549. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17550. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  17551. VkResult result = d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) );
  17552. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique" );
  17553. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  17554. UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) );
  17555. }
  17556. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  17557. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17558. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  17559. #if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
  17560. //=== VK_EXT_directfb_surface ===
  17561. template <typename Dispatch>
  17562. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT * pCreateInfo,
  17563. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17564. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  17565. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17566. {
  17567. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17568. return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance,
  17569. reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ),
  17570. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  17571. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  17572. }
  17573. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17574. template <typename Dispatch>
  17575. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
  17576. Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
  17577. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  17578. Dispatch const & d ) const
  17579. {
  17580. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17581. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  17582. VkResult result = d.vkCreateDirectFBSurfaceEXT(
  17583. m_instance,
  17584. reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
  17585. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  17586. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  17587. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
  17588. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  17589. }
  17590. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Creates a DirectFB-backed VkSurfaceKHR and returns it wrapped in a UniqueHandle that
  // destroys the surface (through this Instance, with the same allocator) on scope exit.
  //
  // createInfo: DirectFB surface creation parameters.
  // allocator : optional host allocation callbacks, used for both creation and later destruction.
  // d         : dispatcher providing vkCreateDirectFBSurfaceEXT.
  // Failure codes are funneled through resultCheck (throws unless configured otherwise).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
    Instance::createDirectFBSurfaceEXTUnique( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );  // dispatcher must match the compiled header version
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateDirectFBSurfaceEXT(
      m_instance,
      reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  }
  17609. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  17610. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17611. template <typename Dispatch>
  17612. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex,
  17613. IDirectFB * dfb,
  17614. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17615. {
  17616. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17617. return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
  17618. }
  17619. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17620. template <typename Dispatch>
  17621. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  17622. PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17623. {
  17624. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17625. VkBool32 result = d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
  17626. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  17627. }
  17628. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17629. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  17630. //=== VK_EXT_vertex_input_dynamic_state ===
  17631. template <typename Dispatch>
  17632. VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT( uint32_t vertexBindingDescriptionCount,
  17633. const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT * pVertexBindingDescriptions,
  17634. uint32_t vertexAttributeDescriptionCount,
  17635. const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT * pVertexAttributeDescriptions,
  17636. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17637. {
  17638. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17639. d.vkCmdSetVertexInputEXT( m_commandBuffer,
  17640. vertexBindingDescriptionCount,
  17641. reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( pVertexBindingDescriptions ),
  17642. vertexAttributeDescriptionCount,
  17643. reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( pVertexAttributeDescriptions ) );
  17644. }
  17645. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17646. template <typename Dispatch>
  17647. VULKAN_HPP_INLINE void CommandBuffer::setVertexInputEXT(
  17648. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT> const & vertexBindingDescriptions,
  17649. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT> const & vertexAttributeDescriptions,
  17650. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17651. {
  17652. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17653. d.vkCmdSetVertexInputEXT( m_commandBuffer,
  17654. vertexBindingDescriptions.size(),
  17655. reinterpret_cast<const VkVertexInputBindingDescription2EXT *>( vertexBindingDescriptions.data() ),
  17656. vertexAttributeDescriptions.size(),
  17657. reinterpret_cast<const VkVertexInputAttributeDescription2EXT *>( vertexAttributeDescriptions.data() ) );
  17658. }
  17659. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17660. #if defined( VK_USE_PLATFORM_FUCHSIA )
  17661. //=== VK_FUCHSIA_external_memory ===
  17662. template <typename Dispatch>
  17663. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17664. Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
  17665. zx_handle_t * pZirconHandle,
  17666. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17667. {
  17668. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17669. return static_cast<Result>(
  17670. d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  17671. }
  17672. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the exported zircon handle by value.
  //
  // getZirconHandleInfo: describes the memory object and handle type to export.
  // d                  : dispatcher providing vkGetMemoryZirconHandleFUCHSIA.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getMemoryZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    zx_handle_t zirconHandle;
    VkResult result =
      d.vkGetMemoryZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkMemoryGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
  }
  17684. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17685. template <typename Dispatch>
  17686. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17687. Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
  17688. zx_handle_t zirconHandle,
  17689. VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties,
  17690. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17691. {
  17692. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17693. return static_cast<Result>(
  17694. d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
  17695. static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
  17696. zirconHandle,
  17697. reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( pMemoryZirconHandleProperties ) ) );
  17698. }
  17699. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the MemoryZirconHandlePropertiesFUCHSIA struct by value.
  //
  // handleType  : the external-memory handle type the zircon handle was created with.
  // zirconHandle: the handle to query.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::type
    Device::getMemoryZirconHandlePropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType,
                                                    zx_handle_t zirconHandle,
                                                    Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA memoryZirconHandleProperties;
    VkResult result = d.vkGetMemoryZirconHandlePropertiesFUCHSIA( m_device,
                                                                  static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ),
                                                                  zirconHandle,
                                                                  reinterpret_cast<VkMemoryZirconHandlePropertiesFUCHSIA *>( &memoryZirconHandleProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryZirconHandlePropertiesFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryZirconHandleProperties );
  }
  17715. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17716. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  17717. #if defined( VK_USE_PLATFORM_FUCHSIA )
  17718. //=== VK_FUCHSIA_external_semaphore ===
  17719. template <typename Dispatch>
  17720. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreZirconHandleFUCHSIA(
  17721. const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17722. {
  17723. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17724. return static_cast<Result>( d.vkImportSemaphoreZirconHandleFUCHSIA(
  17725. m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( pImportSemaphoreZirconHandleInfo ) ) );
  17726. }
  17727. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: void-returning import; failure is reported through resultCheck.
  //
  // importSemaphoreZirconHandleInfo: describes the target semaphore, handle type, and handle.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::importSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA & importSemaphoreZirconHandleInfo,
                                                Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkImportSemaphoreZirconHandleFUCHSIA(
      m_device, reinterpret_cast<const VkImportSemaphoreZirconHandleInfoFUCHSIA *>( &importSemaphoreZirconHandleInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  17739. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17740. template <typename Dispatch>
  17741. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17742. Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo,
  17743. zx_handle_t * pZirconHandle,
  17744. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17745. {
  17746. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17747. return static_cast<Result>(
  17748. d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( pGetZirconHandleInfo ), pZirconHandle ) );
  17749. }
  17750. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the exported zircon handle by value.
  //
  // getZirconHandleInfo: describes the semaphore and handle type to export.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<zx_handle_t>::type
    Device::getSemaphoreZirconHandleFUCHSIA( const VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA & getZirconHandleInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    zx_handle_t zirconHandle;
    VkResult result =
      d.vkGetSemaphoreZirconHandleFUCHSIA( m_device, reinterpret_cast<const VkSemaphoreGetZirconHandleInfoFUCHSIA *>( &getZirconHandleInfo ), &zirconHandle );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreZirconHandleFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), zirconHandle );
  }
  17762. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17763. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  17764. #if defined( VK_USE_PLATFORM_FUCHSIA )
  17765. //=== VK_FUCHSIA_buffer_collection ===
  17766. template <typename Dispatch>
  17767. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17768. Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA * pCreateInfo,
  17769. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17770. VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA * pCollection,
  17771. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17772. {
  17773. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17774. return static_cast<Result>( d.vkCreateBufferCollectionFUCHSIA( m_device,
  17775. reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( pCreateInfo ),
  17776. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  17777. reinterpret_cast<VkBufferCollectionFUCHSIA *>( pCollection ) ) );
  17778. }
  17779. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the created BufferCollectionFUCHSIA handle by value.
  //
  // createInfo: collection creation parameters.
  // allocator : optional host allocation callbacks.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::type
    Device::createBufferCollectionFUCHSIA( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                           Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VkResult result = d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), collection );
  }
  17796. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Unique-handle variant: the returned UniqueHandle destroys the collection (through this
  // Device, with the same allocator) when it goes out of scope.
  //
  // createInfo: collection creation parameters.
  // allocator : optional host allocation callbacks, used for both creation and destruction.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>>::type
    Device::createBufferCollectionFUCHSIAUnique( const VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA & createInfo,
                                                 Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                 Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection;
    VkResult result = d.vkCreateBufferCollectionFUCHSIA(
      m_device,
      reinterpret_cast<const VkBufferCollectionCreateInfoFUCHSIA *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkBufferCollectionFUCHSIA *>( &collection ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferCollectionFUCHSIAUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA, Dispatch>( collection, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
  17815. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  17816. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17817. template <typename Dispatch>
  17818. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17819. Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17820. const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA * pImageConstraintsInfo,
  17821. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17822. {
  17823. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17824. return static_cast<Result>( d.vkSetBufferCollectionImageConstraintsFUCHSIA(
  17825. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( pImageConstraintsInfo ) ) );
  17826. }
  17827. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: void-returning; failure is reported through resultCheck.
  //
  // collection          : the buffer collection to constrain.
  // imageConstraintsInfo: image-usage constraints to apply.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionImageConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                        const VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA & imageConstraintsInfo,
                                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetBufferCollectionImageConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkImageConstraintsInfoFUCHSIA *>( &imageConstraintsInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionImageConstraintsFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  17840. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17841. template <typename Dispatch>
  17842. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17843. Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17844. const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo,
  17845. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17846. {
  17847. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17848. return static_cast<Result>( d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
  17849. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( pBufferConstraintsInfo ) ) );
  17850. }
  17851. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: void-returning; failure is reported through resultCheck.
  //
  // collection           : the buffer collection to constrain.
  // bufferConstraintsInfo: buffer-usage constraints to apply.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::setBufferCollectionBufferConstraintsFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
                                                         const VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA & bufferConstraintsInfo,
                                                         Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkSetBufferCollectionBufferConstraintsFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkBufferConstraintsInfoFUCHSIA *>( &bufferConstraintsInfo ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setBufferCollectionBufferConstraintsFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  17864. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17865. template <typename Dispatch>
  17866. VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17867. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17868. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17869. {
  17870. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17871. d.vkDestroyBufferCollectionFUCHSIA(
  17872. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  17873. }
  17874. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17875. template <typename Dispatch>
  17876. VULKAN_HPP_INLINE void Device::destroyBufferCollectionFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17877. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  17878. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17879. {
  17880. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17881. d.vkDestroyBufferCollectionFUCHSIA(
  17882. m_device,
  17883. static_cast<VkBufferCollectionFUCHSIA>( collection ),
  17884. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  17885. }
  17886. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17887. template <typename Dispatch>
  17888. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17889. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  17890. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17891. {
  17892. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17893. d.vkDestroyBufferCollectionFUCHSIA(
  17894. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  17895. }
  17896. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  17897. template <typename Dispatch>
  17898. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17899. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  17900. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17901. {
  17902. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17903. d.vkDestroyBufferCollectionFUCHSIA(
  17904. m_device,
  17905. static_cast<VkBufferCollectionFUCHSIA>( collection ),
  17906. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  17907. }
  17908. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17909. template <typename Dispatch>
  17910. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17911. Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection,
  17912. VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA * pProperties,
  17913. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17914. {
  17915. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17916. return static_cast<Result>( d.vkGetBufferCollectionPropertiesFUCHSIA(
  17917. m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( pProperties ) ) );
  17918. }
  17919. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the BufferCollectionPropertiesFUCHSIA struct by value.
  //
  // collection: the buffer collection to query.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::type
    Device::getBufferCollectionPropertiesFUCHSIA( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA collection, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA properties;
    VkResult result = d.vkGetBufferCollectionPropertiesFUCHSIA(
      m_device, static_cast<VkBufferCollectionFUCHSIA>( collection ), reinterpret_cast<VkBufferCollectionPropertiesFUCHSIA *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getBufferCollectionPropertiesFUCHSIA" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  }
  17931. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17932. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  17933. //=== VK_HUAWEI_subpass_shading ===
  17934. template <typename Dispatch>
  17935. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass,
  17936. VULKAN_HPP_NAMESPACE::Extent2D * pMaxWorkgroupSize,
  17937. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17938. {
  17939. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17940. return static_cast<Result>( d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
  17941. m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( pMaxWorkgroupSize ) ) );
  17942. }
  17943. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns a ResultValue pairing the result code with the queried extent.
  // Both eSuccess and eIncomplete are accepted as non-throwing outcomes here, so the caller
  // receives the result code alongside the value instead of a plain value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>
    Device::getSubpassShadingMaxWorkgroupSizeHUAWEI( VULKAN_HPP_NAMESPACE::RenderPass renderpass, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Extent2D maxWorkgroupSize;
    VkResult result = d.vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
      m_device, static_cast<VkRenderPass>( renderpass ), reinterpret_cast<VkExtent2D *>( &maxWorkgroupSize ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
                 VULKAN_HPP_NAMESPACE_STRING "::Device::getSubpassShadingMaxWorkgroupSizeHUAWEI",
                 { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eIncomplete } );
    return ResultValue<VULKAN_HPP_NAMESPACE::Extent2D>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), maxWorkgroupSize );
  }
  17957. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17958. template <typename Dispatch>
  17959. VULKAN_HPP_INLINE void CommandBuffer::subpassShadingHUAWEI( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17960. {
  17961. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17962. d.vkCmdSubpassShadingHUAWEI( m_commandBuffer );
  17963. }
  17964. //=== VK_HUAWEI_invocation_mask ===
  17965. template <typename Dispatch>
  17966. VULKAN_HPP_INLINE void CommandBuffer::bindInvocationMaskHUAWEI( VULKAN_HPP_NAMESPACE::ImageView imageView,
  17967. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout,
  17968. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17969. {
  17970. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17971. d.vkCmdBindInvocationMaskHUAWEI( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  17972. }
  17973. //=== VK_NV_external_memory_rdma ===
  17974. template <typename Dispatch>
  17975. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  17976. Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo,
  17977. VULKAN_HPP_NAMESPACE::RemoteAddressNV * pAddress,
  17978. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  17979. {
  17980. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  17981. return static_cast<Result>( d.vkGetMemoryRemoteAddressNV(
  17982. m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( pMemoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( pAddress ) ) );
  17983. }
  17984. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the remote address by value.
  //
  // memoryGetRemoteAddressInfo: describes the memory object and handle type to query.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RemoteAddressNV>::type
    Device::getMemoryRemoteAddressNV( const VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV & memoryGetRemoteAddressInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::RemoteAddressNV address;
    VkResult result = d.vkGetMemoryRemoteAddressNV(
      m_device, reinterpret_cast<const VkMemoryGetRemoteAddressInfoNV *>( &memoryGetRemoteAddressInfo ), reinterpret_cast<VkRemoteAddressNV *>( &address ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryRemoteAddressNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), address );
  }
  17996. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  17997. //=== VK_EXT_pipeline_properties ===
  17998. template <typename Dispatch>
  17999. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT * pPipelineInfo,
  18000. VULKAN_HPP_NAMESPACE::BaseOutStructure * pPipelineProperties,
  18001. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18002. {
  18003. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18004. return static_cast<Result>( d.vkGetPipelinePropertiesEXT(
  18005. m_device, reinterpret_cast<const VkPipelineInfoEXT *>( pPipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( pPipelineProperties ) ) );
  18006. }
  18007. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns a BaseOutStructure by value.
  //
  // pipelineInfo: identifies the pipeline to query.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BaseOutStructure>::type
    Device::getPipelinePropertiesEXT( const VULKAN_HPP_NAMESPACE::PipelineInfoEXT & pipelineInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::BaseOutStructure pipelineProperties;
    VkResult result = d.vkGetPipelinePropertiesEXT(
      m_device, reinterpret_cast<const VkPipelineInfoEXT *>( &pipelineInfo ), reinterpret_cast<VkBaseOutStructure *>( &pipelineProperties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelinePropertiesEXT" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineProperties );
  }
  18019. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18020. //=== VK_EXT_extended_dynamic_state2 ===
  18021. template <typename Dispatch>
  18022. VULKAN_HPP_INLINE void CommandBuffer::setPatchControlPointsEXT( uint32_t patchControlPoints, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18023. {
  18024. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18025. d.vkCmdSetPatchControlPointsEXT( m_commandBuffer, patchControlPoints );
  18026. }
  18027. template <typename Dispatch>
  18028. VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable,
  18029. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18030. {
  18031. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18032. d.vkCmdSetRasterizerDiscardEnableEXT( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) );
  18033. }
  18034. template <typename Dispatch>
  18035. VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18036. {
  18037. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18038. d.vkCmdSetDepthBiasEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) );
  18039. }
  18040. template <typename Dispatch>
  18041. VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEXT( VULKAN_HPP_NAMESPACE::LogicOp logicOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18042. {
  18043. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18044. d.vkCmdSetLogicOpEXT( m_commandBuffer, static_cast<VkLogicOp>( logicOp ) );
  18045. }
  18046. template <typename Dispatch>
  18047. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable,
  18048. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18049. {
  18050. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18051. d.vkCmdSetPrimitiveRestartEnableEXT( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) );
  18052. }
  18053. #if defined( VK_USE_PLATFORM_SCREEN_QNX )
  18054. //=== VK_QNX_screen_surface ===
  18055. template <typename Dispatch>
  18056. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX * pCreateInfo,
  18057. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  18058. VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface,
  18059. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18060. {
  18061. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18062. return static_cast<Result>( d.vkCreateScreenSurfaceQNX( m_instance,
  18063. reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( pCreateInfo ),
  18064. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  18065. reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) );
  18066. }
  18067. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode variant: returns the created SurfaceKHR by value.
  //
  // createInfo: QNX Screen surface creation parameters.
  // allocator : optional host allocation callbacks.
  // Non-success codes are reported through resultCheck before the value is returned.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type
    Instance::createScreenSurfaceQNX( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
                                      Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                      Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
    VkResult result = d.vkCreateScreenSurfaceQNX(
      m_instance,
      reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkSurfaceKHR *>( &surface ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface );
  }
  18084. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  18085. template <typename Dispatch>
  18086. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type
  18087. Instance::createScreenSurfaceQNXUnique( const VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX & createInfo,
  18088. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  18089. Dispatch const & d ) const
  18090. {
  18091. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18092. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  18093. VkResult result = d.vkCreateScreenSurfaceQNX(
  18094. m_instance,
  18095. reinterpret_cast<const VkScreenSurfaceCreateInfoQNX *>( &createInfo ),
  18096. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  18097. reinterpret_cast<VkSurfaceKHR *>( &surface ) );
  18098. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createScreenSurfaceQNXUnique" );
  18099. return createResultValueType(
  18100. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18101. UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) );
  18102. }
  18103. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  18104. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18105. template <typename Dispatch>
  18106. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex,
  18107. struct _screen_window * window,
  18108. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18109. {
  18110. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18111. return static_cast<Bool32>( d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, window ) );
  18112. }
  18113. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18114. template <typename Dispatch>
  18115. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32
  18116. PhysicalDevice::getScreenPresentationSupportQNX( uint32_t queueFamilyIndex, struct _screen_window & window, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18117. {
  18118. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18119. VkBool32 result = d.vkGetPhysicalDeviceScreenPresentationSupportQNX( m_physicalDevice, queueFamilyIndex, &window );
  18120. return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result );
  18121. }
  18122. # endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18123. #endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  18124. //=== VK_EXT_color_write_enable ===
  18125. template <typename Dispatch>
  18126. VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( uint32_t attachmentCount,
  18127. const VULKAN_HPP_NAMESPACE::Bool32 * pColorWriteEnables,
  18128. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18129. {
  18130. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18131. d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorWriteEnables ) );
  18132. }
  18133. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18134. template <typename Dispatch>
  18135. VULKAN_HPP_INLINE void CommandBuffer::setColorWriteEnableEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorWriteEnables,
  18136. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18137. {
  18138. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18139. d.vkCmdSetColorWriteEnableEXT( m_commandBuffer, colorWriteEnables.size(), reinterpret_cast<const VkBool32 *>( colorWriteEnables.data() ) );
  18140. }
  18141. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18142. //=== VK_KHR_ray_tracing_maintenance1 ===
  18143. template <typename Dispatch>
  18144. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirect2KHR( VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress,
  18145. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18146. {
  18147. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18148. d.vkCmdTraceRaysIndirect2KHR( m_commandBuffer, static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  18149. }
  18150. //=== VK_EXT_multi_draw ===
  18151. template <typename Dispatch>
  18152. VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( uint32_t drawCount,
  18153. const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT * pVertexInfo,
  18154. uint32_t instanceCount,
  18155. uint32_t firstInstance,
  18156. uint32_t stride,
  18157. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18158. {
  18159. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18160. d.vkCmdDrawMultiEXT( m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawInfoEXT *>( pVertexInfo ), instanceCount, firstInstance, stride );
  18161. }
  18162. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18163. template <typename Dispatch>
  18164. VULKAN_HPP_INLINE void CommandBuffer::drawMultiEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT> const & vertexInfo,
  18165. uint32_t instanceCount,
  18166. uint32_t firstInstance,
  18167. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18168. {
  18169. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18170. d.vkCmdDrawMultiEXT( m_commandBuffer,
  18171. vertexInfo.size(),
  18172. reinterpret_cast<const VkMultiDrawInfoEXT *>( vertexInfo.data() ),
  18173. instanceCount,
  18174. firstInstance,
  18175. vertexInfo.stride() );
  18176. }
  18177. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18178. template <typename Dispatch>
  18179. VULKAN_HPP_INLINE void CommandBuffer::drawMultiIndexedEXT( uint32_t drawCount,
  18180. const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT * pIndexInfo,
  18181. uint32_t instanceCount,
  18182. uint32_t firstInstance,
  18183. uint32_t stride,
  18184. const int32_t * pVertexOffset,
  18185. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18186. {
  18187. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18188. d.vkCmdDrawMultiIndexedEXT(
  18189. m_commandBuffer, drawCount, reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( pIndexInfo ), instanceCount, firstInstance, stride, pVertexOffset );
  18190. }
  18191. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18192. template <typename Dispatch>
  18193. VULKAN_HPP_INLINE void
  18194. CommandBuffer::drawMultiIndexedEXT( VULKAN_HPP_NAMESPACE::StridedArrayProxy<const VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT> const & indexInfo,
  18195. uint32_t instanceCount,
  18196. uint32_t firstInstance,
  18197. Optional<const int32_t> vertexOffset,
  18198. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18199. {
  18200. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18201. d.vkCmdDrawMultiIndexedEXT( m_commandBuffer,
  18202. indexInfo.size(),
  18203. reinterpret_cast<const VkMultiDrawIndexedInfoEXT *>( indexInfo.data() ),
  18204. instanceCount,
  18205. firstInstance,
  18206. indexInfo.stride(),
  18207. static_cast<const int32_t *>( vertexOffset ) );
  18208. }
  18209. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18210. //=== VK_EXT_opacity_micromap ===
  18211. template <typename Dispatch>
  18212. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT * pCreateInfo,
  18213. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  18214. VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromap,
  18215. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18216. {
  18217. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18218. return static_cast<Result>( d.vkCreateMicromapEXT( m_device,
  18219. reinterpret_cast<const VkMicromapCreateInfoEXT *>( pCreateInfo ),
  18220. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  18221. reinterpret_cast<VkMicromapEXT *>( pMicromap ) ) );
  18222. }
  18223. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18224. template <typename Dispatch>
  18225. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MicromapEXT>::type
  18226. Device::createMicromapEXT( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
  18227. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  18228. Dispatch const & d ) const
  18229. {
  18230. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18231. VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
  18232. VkResult result =
  18233. d.vkCreateMicromapEXT( m_device,
  18234. reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
  18235. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  18236. reinterpret_cast<VkMicromapEXT *>( &micromap ) );
  18237. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXT" );
  18238. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), micromap );
  18239. }
  18240. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  18241. template <typename Dispatch>
  18242. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>>::type
  18243. Device::createMicromapEXTUnique( const VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT & createInfo,
  18244. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  18245. Dispatch const & d ) const
  18246. {
  18247. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18248. VULKAN_HPP_NAMESPACE::MicromapEXT micromap;
  18249. VkResult result =
  18250. d.vkCreateMicromapEXT( m_device,
  18251. reinterpret_cast<const VkMicromapCreateInfoEXT *>( &createInfo ),
  18252. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  18253. reinterpret_cast<VkMicromapEXT *>( &micromap ) );
  18254. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createMicromapEXTUnique" );
  18255. return createResultValueType(
  18256. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18257. UniqueHandle<VULKAN_HPP_NAMESPACE::MicromapEXT, Dispatch>( micromap, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  18258. }
  18259. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  18260. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18261. template <typename Dispatch>
  18262. VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
  18263. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  18264. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18265. {
  18266. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18267. d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  18268. }
  18269. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18270. template <typename Dispatch>
  18271. VULKAN_HPP_INLINE void Device::destroyMicromapEXT( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
  18272. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  18273. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18274. {
  18275. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18276. d.vkDestroyMicromapEXT( m_device,
  18277. static_cast<VkMicromapEXT>( micromap ),
  18278. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  18279. }
  18280. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18281. template <typename Dispatch>
  18282. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
  18283. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  18284. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18285. {
  18286. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18287. d.vkDestroyMicromapEXT( m_device, static_cast<VkMicromapEXT>( micromap ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  18288. }
  18289. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18290. template <typename Dispatch>
  18291. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::MicromapEXT micromap,
  18292. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  18293. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18294. {
  18295. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18296. d.vkDestroyMicromapEXT( m_device,
  18297. static_cast<VkMicromapEXT>( micromap ),
  18298. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  18299. }
  18300. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18301. template <typename Dispatch>
  18302. VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( uint32_t infoCount,
  18303. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
  18304. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18305. {
  18306. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18307. d.vkCmdBuildMicromapsEXT( m_commandBuffer, infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) );
  18308. }
  18309. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18310. template <typename Dispatch>
  18311. VULKAN_HPP_INLINE void CommandBuffer::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
  18312. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18313. {
  18314. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18315. d.vkCmdBuildMicromapsEXT( m_commandBuffer, infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
  18316. }
  18317. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18318. template <typename Dispatch>
  18319. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18320. uint32_t infoCount,
  18321. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pInfos,
  18322. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18323. {
  18324. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18325. return static_cast<Result>( d.vkBuildMicromapsEXT(
  18326. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkMicromapBuildInfoEXT *>( pInfos ) ) );
  18327. }
  18328. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18329. template <typename Dispatch>
  18330. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result
  18331. Device::buildMicromapsEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18332. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT> const & infos,
  18333. Dispatch const & d ) const
  18334. {
  18335. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18336. VkResult result = d.vkBuildMicromapsEXT(
  18337. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkMicromapBuildInfoEXT *>( infos.data() ) );
  18338. resultCheck(
  18339. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18340. VULKAN_HPP_NAMESPACE_STRING "::Device::buildMicromapsEXT",
  18341. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  18342. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  18343. }
  18344. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18345. template <typename Dispatch>
  18346. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18347. const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo,
  18348. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18349. {
  18350. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18351. return static_cast<Result>(
  18352. d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) ) );
  18353. }
  18354. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18355. template <typename Dispatch>
  18356. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18357. const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info,
  18358. Dispatch const & d ) const
  18359. {
  18360. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18361. VkResult result =
  18362. d.vkCopyMicromapEXT( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  18363. resultCheck(
  18364. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18365. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapEXT",
  18366. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  18367. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  18368. }
  18369. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18370. template <typename Dispatch>
  18371. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMicromapToMemoryEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18372. const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
  18373. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18374. {
  18375. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18376. return static_cast<Result>( d.vkCopyMicromapToMemoryEXT(
  18377. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) ) );
  18378. }
  18379. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18380. template <typename Dispatch>
  18381. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMicromapToMemoryEXT(
  18382. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info, Dispatch const & d ) const
  18383. {
  18384. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18385. VkResult result = d.vkCopyMicromapToMemoryEXT(
  18386. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  18387. resultCheck(
  18388. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18389. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMicromapToMemoryEXT",
  18390. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  18391. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  18392. }
  18393. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18394. template <typename Dispatch>
  18395. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToMicromapEXT( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation,
  18396. const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
  18397. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18398. {
  18399. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18400. return static_cast<Result>( d.vkCopyMemoryToMicromapEXT(
  18401. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) ) );
  18402. }
  18403. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18404. template <typename Dispatch>
  18405. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::copyMemoryToMicromapEXT(
  18406. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info, Dispatch const & d ) const
  18407. {
  18408. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18409. VkResult result = d.vkCopyMemoryToMicromapEXT(
  18410. m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  18411. resultCheck(
  18412. static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  18413. VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToMicromapEXT",
  18414. { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  18415. return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
  18416. }
  18417. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18418. template <typename Dispatch>
  18419. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeMicromapsPropertiesEXT( uint32_t micromapCount,
  18420. const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
  18421. VULKAN_HPP_NAMESPACE::QueryType queryType,
  18422. size_t dataSize,
  18423. void * pData,
  18424. size_t stride,
  18425. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18426. {
  18427. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18428. return static_cast<Result>( d.vkWriteMicromapsPropertiesEXT(
  18429. m_device, micromapCount, reinterpret_cast<const VkMicromapEXT *>( pMicromaps ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
  18430. }
  18431. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18432. template <typename DataType, typename DataTypeAllocator, typename Dispatch>
  18433. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DataType, DataTypeAllocator>>::type
  18434. Device::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  18435. VULKAN_HPP_NAMESPACE::QueryType queryType,
  18436. size_t dataSize,
  18437. size_t stride,
  18438. Dispatch const & d ) const
  18439. {
  18440. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18441. VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
  18442. std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
  18443. VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
  18444. micromaps.size(),
  18445. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  18446. static_cast<VkQueryType>( queryType ),
  18447. data.size() * sizeof( DataType ),
  18448. reinterpret_cast<void *>( data.data() ),
  18449. stride );
  18450. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertiesEXT" );
  18451. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  18452. }
  18453. template <typename DataType, typename Dispatch>
  18454. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<DataType>::type
  18455. Device::writeMicromapsPropertyEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  18456. VULKAN_HPP_NAMESPACE::QueryType queryType,
  18457. size_t stride,
  18458. Dispatch const & d ) const
  18459. {
  18460. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18461. DataType data;
  18462. VkResult result = d.vkWriteMicromapsPropertiesEXT( m_device,
  18463. micromaps.size(),
  18464. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  18465. static_cast<VkQueryType>( queryType ),
  18466. sizeof( DataType ),
  18467. reinterpret_cast<void *>( &data ),
  18468. stride );
  18469. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::writeMicromapsPropertyEXT" );
  18470. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
  18471. }
  18472. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18473. template <typename Dispatch>
  18474. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18475. {
  18476. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18477. d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( pInfo ) );
  18478. }
  18479. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18480. template <typename Dispatch>
  18481. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18482. {
  18483. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18484. d.vkCmdCopyMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapInfoEXT *>( &info ) );
  18485. }
  18486. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18487. template <typename Dispatch>
  18488. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT * pInfo,
  18489. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18490. {
  18491. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18492. d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( pInfo ) );
  18493. }
  18494. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18495. template <typename Dispatch>
  18496. VULKAN_HPP_INLINE void CommandBuffer::copyMicromapToMemoryEXT( const VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT & info,
  18497. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18498. {
  18499. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18500. d.vkCmdCopyMicromapToMemoryEXT( m_commandBuffer, reinterpret_cast<const VkCopyMicromapToMemoryInfoEXT *>( &info ) );
  18501. }
  18502. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18503. template <typename Dispatch>
  18504. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT * pInfo,
  18505. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18506. {
  18507. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18508. d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( pInfo ) );
  18509. }
  18510. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18511. template <typename Dispatch>
  18512. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToMicromapEXT( const VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT & info,
  18513. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18514. {
  18515. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18516. d.vkCmdCopyMemoryToMicromapEXT( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToMicromapInfoEXT *>( &info ) );
  18517. }
  18518. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18519. template <typename Dispatch>
  18520. VULKAN_HPP_INLINE void CommandBuffer::writeMicromapsPropertiesEXT( uint32_t micromapCount,
  18521. const VULKAN_HPP_NAMESPACE::MicromapEXT * pMicromaps,
  18522. VULKAN_HPP_NAMESPACE::QueryType queryType,
  18523. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  18524. uint32_t firstQuery,
  18525. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18526. {
  18527. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18528. d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
  18529. micromapCount,
  18530. reinterpret_cast<const VkMicromapEXT *>( pMicromaps ),
  18531. static_cast<VkQueryType>( queryType ),
  18532. static_cast<VkQueryPool>( queryPool ),
  18533. firstQuery );
  18534. }
  18535. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18536. template <typename Dispatch>
  18537. VULKAN_HPP_INLINE void
  18538. CommandBuffer::writeMicromapsPropertiesEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MicromapEXT> const & micromaps,
  18539. VULKAN_HPP_NAMESPACE::QueryType queryType,
  18540. VULKAN_HPP_NAMESPACE::QueryPool queryPool,
  18541. uint32_t firstQuery,
  18542. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18543. {
  18544. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18545. d.vkCmdWriteMicromapsPropertiesEXT( m_commandBuffer,
  18546. micromaps.size(),
  18547. reinterpret_cast<const VkMicromapEXT *>( micromaps.data() ),
  18548. static_cast<VkQueryType>( queryType ),
  18549. static_cast<VkQueryPool>( queryPool ),
  18550. firstQuery );
  18551. }
  18552. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18553. template <typename Dispatch>
  18554. VULKAN_HPP_INLINE void Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT * pVersionInfo,
  18555. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR * pCompatibility,
  18556. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18557. {
  18558. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18559. d.vkGetDeviceMicromapCompatibilityEXT( m_device,
  18560. reinterpret_cast<const VkMicromapVersionInfoEXT *>( pVersionInfo ),
  18561. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  18562. }
  18563. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18564. template <typename Dispatch>
  18565. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR
  18566. Device::getMicromapCompatibilityEXT( const VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18567. {
  18568. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18569. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
  18570. d.vkGetDeviceMicromapCompatibilityEXT( m_device,
  18571. reinterpret_cast<const VkMicromapVersionInfoEXT *>( &versionInfo ),
  18572. reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
  18573. return compatibility;
  18574. }
  18575. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  18576. template <typename Dispatch>
  18577. VULKAN_HPP_INLINE void Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  18578. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT * pBuildInfo,
  18579. VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT * pSizeInfo,
  18580. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18581. {
  18582. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18583. d.vkGetMicromapBuildSizesEXT( m_device,
  18584. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  18585. reinterpret_cast<const VkMicromapBuildInfoEXT *>( pBuildInfo ),
  18586. reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( pSizeInfo ) );
  18587. }
  18588. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18589. template <typename Dispatch>
  18590. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT
  18591. Device::getMicromapBuildSizesEXT( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType,
  18592. const VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT & buildInfo,
  18593. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18594. {
  18595. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18596. VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT sizeInfo;
  18597. d.vkGetMicromapBuildSizesEXT( m_device,
  18598. static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ),
  18599. reinterpret_cast<const VkMicromapBuildInfoEXT *>( &buildInfo ),
  18600. reinterpret_cast<VkMicromapBuildSizesInfoEXT *>( &sizeInfo ) );
  18601. return sizeInfo;
  18602. }
  18603. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_HUAWEI_cluster_culling_shader ===
// Thin wrapper over vkCmdDrawClusterHUAWEI: forwards the three group counts unchanged.
template <typename Dispatch>
VULKAN_HPP_INLINE void
CommandBuffer::drawClusterHUAWEI( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
// Sanity check: the dispatcher must match the Vulkan header version this wrapper was built against.
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawClusterHUAWEI( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
}
// Wraps vkCmdDrawClusterIndirectHUAWEI: translates the buffer handle and offset to their Vk types.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::drawClusterIndirectHUAWEI( VULKAN_HPP_NAMESPACE::Buffer buffer,
VULKAN_HPP_NAMESPACE::DeviceSize offset,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDrawClusterIndirectHUAWEI( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
}
//=== VK_EXT_pageable_device_local_memory ===
// Wraps vkSetDeviceMemoryPriorityEXT: the float priority is passed through unchanged.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::setMemoryPriorityEXT( VULKAN_HPP_NAMESPACE::DeviceMemory memory, float priority, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkSetDeviceMemoryPriorityEXT( m_device, static_cast<VkDeviceMemory>( memory ), priority );
}
//=== VK_KHR_maintenance4 ===
// Pointer-style wrapper over vkGetDeviceBufferMemoryRequirementsKHR; the caller owns both structs.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceBufferMemoryRequirementsKHR(
m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
  18637. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18638. template <typename Dispatch>
  18639. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  18640. Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18641. {
  18642. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18643. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  18644. d.vkGetDeviceBufferMemoryRequirementsKHR(
  18645. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18646. return memoryRequirements;
  18647. }
  18648. template <typename X, typename Y, typename... Z, typename Dispatch>
  18649. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  18650. Device::getBufferMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18651. {
  18652. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18653. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  18654. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  18655. d.vkGetDeviceBufferMemoryRequirementsKHR(
  18656. m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18657. return structureChain;
  18658. }
  18659. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-style wrapper over vkGetDeviceImageMemoryRequirementsKHR; the caller owns both structs.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceImageMemoryRequirementsKHR(
m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
  18669. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18670. template <typename Dispatch>
  18671. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  18672. Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18673. {
  18674. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18675. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  18676. d.vkGetDeviceImageMemoryRequirementsKHR(
  18677. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18678. return memoryRequirements;
  18679. }
  18680. template <typename X, typename Y, typename... Z, typename Dispatch>
  18681. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  18682. Device::getImageMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18683. {
  18684. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18685. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  18686. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  18687. d.vkGetDeviceImageMemoryRequirementsKHR(
  18688. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18689. return structureChain;
  18690. }
  18691. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-style wrapper over vkGetDeviceImageSparseMemoryRequirementsKHR.
// The count pointer is passed straight through, so the caller drives the usual
// count-query / fill two-call pattern themselves.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo,
uint32_t * pSparseMemoryRequirementCount,
VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ),
pSparseMemoryRequirementCount,
reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
}
  18704. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18705. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  18706. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  18707. Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const
  18708. {
  18709. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18710. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  18711. uint32_t sparseMemoryRequirementCount;
  18712. d.vkGetDeviceImageSparseMemoryRequirementsKHR(
  18713. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  18714. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  18715. d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
  18716. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  18717. &sparseMemoryRequirementCount,
  18718. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  18719. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  18720. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  18721. {
  18722. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  18723. }
  18724. return sparseMemoryRequirements;
  18725. }
  18726. template <typename SparseImageMemoryRequirements2Allocator,
  18727. typename Dispatch,
  18728. typename B1,
  18729. typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
  18730. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator>
  18731. Device::getImageSparseMemoryRequirementsKHR( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info,
  18732. SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator,
  18733. Dispatch const & d ) const
  18734. {
  18735. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18736. std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements(
  18737. sparseImageMemoryRequirements2Allocator );
  18738. uint32_t sparseMemoryRequirementCount;
  18739. d.vkGetDeviceImageSparseMemoryRequirementsKHR(
  18740. m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr );
  18741. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  18742. d.vkGetDeviceImageSparseMemoryRequirementsKHR( m_device,
  18743. reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ),
  18744. &sparseMemoryRequirementCount,
  18745. reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  18746. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  18747. if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
  18748. {
  18749. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  18750. }
  18751. return sparseMemoryRequirements;
  18752. }
  18753. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_VALVE_descriptor_set_host_mapping ===
// Pointer-style wrapper over vkGetDescriptorSetLayoutHostMappingInfoVALVE.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE * pBindingReference,
VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE * pHostMapping,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( pBindingReference ),
reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( pHostMapping ) );
}
  18765. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18766. template <typename Dispatch>
  18767. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE
  18768. Device::getDescriptorSetLayoutHostMappingInfoVALVE( const VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE & bindingReference,
  18769. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18770. {
  18771. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18772. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE hostMapping;
  18773. d.vkGetDescriptorSetLayoutHostMappingInfoVALVE( m_device,
  18774. reinterpret_cast<const VkDescriptorSetBindingReferenceVALVE *>( &bindingReference ),
  18775. reinterpret_cast<VkDescriptorSetLayoutHostMappingInfoVALVE *>( &hostMapping ) );
  18776. return hostMapping;
  18777. }
  18778. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-style wrapper over vkGetDescriptorSetHostMappingVALVE; the mapping pointer is written through ppData.
template <typename Dispatch>
VULKAN_HPP_INLINE void
Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), ppData );
}
  18786. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18787. template <typename Dispatch>
  18788. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE void * Device::getDescriptorSetHostMappingVALVE( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet,
  18789. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18790. {
  18791. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18792. void * pData;
  18793. d.vkGetDescriptorSetHostMappingVALVE( m_device, static_cast<VkDescriptorSet>( descriptorSet ), &pData );
  18794. return pData;
  18795. }
  18796. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_NV_copy_memory_indirect ===
// Wraps vkCmdCopyMemoryIndirectNV: only the device address needs a type translation.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
uint32_t copyCount,
uint32_t stride,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyMemoryIndirectNV( m_commandBuffer, static_cast<VkDeviceAddress>( copyBufferAddress ), copyCount, stride );
}
// Pointer-style wrapper over vkCmdCopyMemoryToImageIndirectNV; the caller supplies both the
// explicit copyCount and the matching subresource array.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
uint32_t copyCount,
uint32_t stride,
VULKAN_HPP_NAMESPACE::Image dstImage,
VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers * pImageSubresources,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
static_cast<VkDeviceAddress>( copyBufferAddress ),
copyCount,
stride,
static_cast<VkImage>( dstImage ),
static_cast<VkImageLayout>( dstImageLayout ),
reinterpret_cast<const VkImageSubresourceLayers *>( pImageSubresources ) );
}
  18825. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18826. template <typename Dispatch>
  18827. VULKAN_HPP_INLINE void
  18828. CommandBuffer::copyMemoryToImageIndirectNV( VULKAN_HPP_NAMESPACE::DeviceAddress copyBufferAddress,
  18829. uint32_t stride,
  18830. VULKAN_HPP_NAMESPACE::Image dstImage,
  18831. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout,
  18832. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceLayers> const & imageSubresources,
  18833. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18834. {
  18835. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18836. d.vkCmdCopyMemoryToImageIndirectNV( m_commandBuffer,
  18837. static_cast<VkDeviceAddress>( copyBufferAddress ),
  18838. imageSubresources.size(),
  18839. stride,
  18840. static_cast<VkImage>( dstImage ),
  18841. static_cast<VkImageLayout>( dstImageLayout ),
  18842. reinterpret_cast<const VkImageSubresourceLayers *>( imageSubresources.data() ) );
  18843. }
  18844. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_NV_memory_decompression ===
// Pointer-style wrapper over vkCmdDecompressMemoryNV; the caller supplies count and region array.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryNV( uint32_t decompressRegionCount,
const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV * pDecompressMemoryRegions,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDecompressMemoryNV( m_commandBuffer, decompressRegionCount, reinterpret_cast<const VkDecompressMemoryRegionNV *>( pDecompressMemoryRegions ) );
}
  18854. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18855. template <typename Dispatch>
  18856. VULKAN_HPP_INLINE void
  18857. CommandBuffer::decompressMemoryNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV> const & decompressMemoryRegions,
  18858. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18859. {
  18860. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18861. d.vkCmdDecompressMemoryNV(
  18862. m_commandBuffer, decompressMemoryRegions.size(), reinterpret_cast<const VkDecompressMemoryRegionNV *>( decompressMemoryRegions.data() ) );
  18863. }
  18864. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Wraps vkCmdDecompressMemoryIndirectCountNV: both device addresses are translated to VkDeviceAddress.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::decompressMemoryIndirectCountNV( VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsAddress,
VULKAN_HPP_NAMESPACE::DeviceAddress indirectCommandsCountAddress,
uint32_t stride,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdDecompressMemoryIndirectCountNV(
m_commandBuffer, static_cast<VkDeviceAddress>( indirectCommandsAddress ), static_cast<VkDeviceAddress>( indirectCommandsCountAddress ), stride );
}
//=== VK_NV_device_generated_commands_compute ===
// Pointer-style wrapper over vkGetPipelineIndirectMemoryRequirementsNV; the caller owns both structs.
template <typename Dispatch>
VULKAN_HPP_INLINE void Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfo,
VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkGetPipelineIndirectMemoryRequirementsNV(
m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
}
  18885. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18886. template <typename Dispatch>
  18887. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2
  18888. Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
  18889. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18890. {
  18891. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18892. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  18893. d.vkGetPipelineIndirectMemoryRequirementsNV(
  18894. m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18895. return memoryRequirements;
  18896. }
  18897. template <typename X, typename Y, typename... Z, typename Dispatch>
  18898. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
  18899. Device::getPipelineIndirectMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo,
  18900. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18901. {
  18902. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18903. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  18904. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  18905. d.vkGetPipelineIndirectMemoryRequirementsNV(
  18906. m_device, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  18907. return structureChain;
  18908. }
  18909. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Wraps vkCmdUpdatePipelineIndirectBufferNV: translates bind point and pipeline handle to Vk types.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::updatePipelineIndirectBufferNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint,
VULKAN_HPP_NAMESPACE::Pipeline pipeline,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdUpdatePipelineIndirectBufferNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
}
// Pointer-style wrapper over vkGetPipelineIndirectDeviceAddressNV; converts the raw VkDeviceAddress
// result to the wrapper DeviceAddress type on return.
template <typename Dispatch>
VULKAN_HPP_INLINE DeviceAddress Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV * pInfo,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
return static_cast<DeviceAddress>(
d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( pInfo ) ) );
}
  18926. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  18927. template <typename Dispatch>
  18928. VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress
  18929. Device::getPipelineIndirectAddressNV( const VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  18930. {
  18931. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  18932. VkDeviceAddress result = d.vkGetPipelineIndirectDeviceAddressNV( m_device, reinterpret_cast<const VkPipelineIndirectDeviceAddressInfoNV *>( &info ) );
  18933. return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
  18934. }
  18935. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
//=== VK_EXT_extended_dynamic_state3 ===
// Wraps vkCmdSetTessellationDomainOriginEXT: enum translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setTessellationDomainOriginEXT( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetTessellationDomainOriginEXT( m_commandBuffer, static_cast<VkTessellationDomainOrigin>( domainOrigin ) );
}
// Wraps vkCmdSetDepthClampEnableEXT: Bool32 translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthClampEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthClampEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClampEnable ) );
}
// Wraps vkCmdSetPolygonModeEXT: enum translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setPolygonModeEXT( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetPolygonModeEXT( m_commandBuffer, static_cast<VkPolygonMode>( polygonMode ) );
}
// Wraps vkCmdSetRasterizationSamplesEXT: sample-count flag bit translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationSamplesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetRasterizationSamplesEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( rasterizationSamples ) );
}
// Pointer-style wrapper over vkCmdSetSampleMaskEXT; the caller is responsible for sizing the
// mask array to match `samples` (the ArrayProxy overload validates this).
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
const VULKAN_HPP_NAMESPACE::SampleMask * pSampleMask,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( pSampleMask ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// ArrayProxy overload of setSampleMaskEXT. Validates that the proxy holds exactly one 32-bit
// mask word per 32 samples before forwarding to the C entry point.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setSampleMaskEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples,
VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SampleMask> const & sampleMask,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
# ifdef VULKAN_HPP_NO_EXCEPTIONS
// Exceptions disabled: the size mismatch is only caught in debug builds via the assert.
VULKAN_HPP_ASSERT( sampleMask.size() == ( static_cast<uint32_t>( samples ) + 31 ) / 32 );
# else
if ( sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32 )
{
throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setSampleMaskEXT: sampleMask.size() != ( static_cast<uint32_t>( samples ) + 31 ) / 32" );
}
# endif /*VULKAN_HPP_NO_EXCEPTIONS*/
d.vkCmdSetSampleMaskEXT( m_commandBuffer, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<const VkSampleMask *>( sampleMask.data() ) );
}
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Wraps vkCmdSetAlphaToCoverageEnableEXT: Bool32 translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToCoverageEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetAlphaToCoverageEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToCoverageEnable ) );
}
// Wraps vkCmdSetAlphaToOneEnableEXT: Bool32 translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setAlphaToOneEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetAlphaToOneEnableEXT( m_commandBuffer, static_cast<VkBool32>( alphaToOneEnable ) );
}
// Wraps vkCmdSetLogicOpEnableEXT: Bool32 translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLogicOpEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetLogicOpEnableEXT( m_commandBuffer, static_cast<VkBool32>( logicOpEnable ) );
}
// Pointer-style wrapper over vkCmdSetColorBlendEnableEXT; caller supplies count and array.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
uint32_t attachmentCount,
const VULKAN_HPP_NAMESPACE::Bool32 * pColorBlendEnables,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkBool32 *>( pColorBlendEnables ) );
}
  19017. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19018. template <typename Dispatch>
  19019. VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEnableEXT( uint32_t firstAttachment,
  19020. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Bool32> const & colorBlendEnables,
  19021. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19022. {
  19023. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19024. d.vkCmdSetColorBlendEnableEXT( m_commandBuffer, firstAttachment, colorBlendEnables.size(), reinterpret_cast<const VkBool32 *>( colorBlendEnables.data() ) );
  19025. }
  19026. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-style wrapper over vkCmdSetColorBlendEquationEXT; caller supplies count and array.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
uint32_t attachmentCount,
const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT * pColorBlendEquations,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetColorBlendEquationEXT(
m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendEquationEXT *>( pColorBlendEquations ) );
}
  19037. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19038. template <typename Dispatch>
  19039. VULKAN_HPP_INLINE void
  19040. CommandBuffer::setColorBlendEquationEXT( uint32_t firstAttachment,
  19041. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT> const & colorBlendEquations,
  19042. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19043. {
  19044. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19045. d.vkCmdSetColorBlendEquationEXT(
  19046. m_commandBuffer, firstAttachment, colorBlendEquations.size(), reinterpret_cast<const VkColorBlendEquationEXT *>( colorBlendEquations.data() ) );
  19047. }
  19048. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Pointer-style wrapper over vkCmdSetColorWriteMaskEXT; caller supplies count and array.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
uint32_t attachmentCount,
const VULKAN_HPP_NAMESPACE::ColorComponentFlags * pColorWriteMasks,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetColorWriteMaskEXT( m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorComponentFlags *>( pColorWriteMasks ) );
}
  19058. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19059. template <typename Dispatch>
  19060. VULKAN_HPP_INLINE void
  19061. CommandBuffer::setColorWriteMaskEXT( uint32_t firstAttachment,
  19062. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorComponentFlags> const & colorWriteMasks,
  19063. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19064. {
  19065. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19066. d.vkCmdSetColorWriteMaskEXT(
  19067. m_commandBuffer, firstAttachment, colorWriteMasks.size(), reinterpret_cast<const VkColorComponentFlags *>( colorWriteMasks.data() ) );
  19068. }
  19069. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
// Wraps vkCmdSetRasterizationStreamEXT: the stream index needs no translation.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setRasterizationStreamEXT( uint32_t rasterizationStream, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetRasterizationStreamEXT( m_commandBuffer, rasterizationStream );
}
// Wraps vkCmdSetConservativeRasterizationModeEXT: enum translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void
CommandBuffer::setConservativeRasterizationModeEXT( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetConservativeRasterizationModeEXT( m_commandBuffer, static_cast<VkConservativeRasterizationModeEXT>( conservativeRasterizationMode ) );
}
// Wraps vkCmdSetExtraPrimitiveOverestimationSizeEXT: the float is passed through unchanged.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setExtraPrimitiveOverestimationSizeEXT( float extraPrimitiveOverestimationSize,
Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetExtraPrimitiveOverestimationSizeEXT( m_commandBuffer, extraPrimitiveOverestimationSize );
}
// Wraps vkCmdSetDepthClipEnableEXT: Bool32 translated via static_cast.
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthClipEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
d.vkCmdSetDepthClipEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthClipEnable ) );
}
  // Dynamically enables or disables custom sample locations for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetSampleLocationsEnableEXT( m_commandBuffer, static_cast<VkBool32>( sampleLocationsEnable ) );
  }
  // Dynamically sets advanced blend state for a range of color attachments.
  // Raw-pointer overload: the caller supplies attachmentCount matching pColorBlendAdvanced.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setColorBlendAdvancedEXT( uint32_t                                            firstAttachment,
                                                                  uint32_t                                            attachmentCount,
                                                                  const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT * pColorBlendAdvanced,
                                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT is layout-compatible with VkColorBlendAdvancedEXT.
    d.vkCmdSetColorBlendAdvancedEXT(
      m_commandBuffer, firstAttachment, attachmentCount, reinterpret_cast<const VkColorBlendAdvancedEXT *>( pColorBlendAdvanced ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the attachment count is derived from the ArrayProxy,
  // so callers cannot pass a mismatched count/pointer pair.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setColorBlendAdvancedEXT( uint32_t                                                                                    firstAttachment,
                                             VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT> const & colorBlendAdvanced,
                                             Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetColorBlendAdvancedEXT(
      m_commandBuffer, firstAttachment, colorBlendAdvanced.size(), reinterpret_cast<const VkColorBlendAdvancedEXT *>( colorBlendAdvanced.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Dynamically selects the provoking vertex mode for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setProvokingVertexModeEXT( VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT provokingVertexMode,
                                                                   Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetProvokingVertexModeEXT( m_commandBuffer, static_cast<VkProvokingVertexModeEXT>( provokingVertexMode ) );
  }
  // Dynamically selects the line rasterization mode for subsequent draws.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setLineRasterizationModeEXT( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetLineRasterizationModeEXT( m_commandBuffer, static_cast<VkLineRasterizationModeEXT>( lineRasterizationMode ) );
  }
  19140. template <typename Dispatch>
  19141. VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19142. {
  19143. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19144. d.vkCmdSetLineStippleEnableEXT( m_commandBuffer, static_cast<VkBool32>( stippledLineEnable ) );
  19145. }
  // Dynamically selects whether the depth range uses [-1,1] (true) or the Vulkan default [0,1] convention.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setDepthClipNegativeOneToOneEXT( VULKAN_HPP_NAMESPACE::Bool32 negativeOneToOne,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetDepthClipNegativeOneToOneEXT( m_commandBuffer, static_cast<VkBool32>( negativeOneToOne ) );
  }
  // Dynamically enables or disables viewport W-scaling (NV extension).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingEnableNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportWScalingEnableNV( m_commandBuffer, static_cast<VkBool32>( viewportWScalingEnable ) );
  }
  // Dynamically sets the viewport swizzle state for a range of viewports.
  // Raw-pointer overload: viewportCount must match the array behind pViewportSwizzles.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setViewportSwizzleNV( uint32_t                                        firstViewport,
                                                              uint32_t                                        viewportCount,
                                                              const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV * pViewportSwizzles,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // C++ wrapper struct is layout-compatible with the raw VkViewportSwizzleNV.
    d.vkCmdSetViewportSwizzleNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportSwizzleNV *>( pViewportSwizzles ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: the viewport count is taken from the ArrayProxy itself.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void
    CommandBuffer::setViewportSwizzleNV( uint32_t                                                                                firstViewport,
                                         VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles,
                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetViewportSwizzleNV(
      m_commandBuffer, firstViewport, viewportSwizzles.size(), reinterpret_cast<const VkViewportSwizzleNV *>( viewportSwizzles.data() ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Dynamically enables or disables coverage-to-color output.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageToColorEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageToColorEnable ) );
  }
  // Dynamically selects which color attachment location receives the coverage value.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageToColorLocationNV( uint32_t coverageToColorLocation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageToColorLocationNV( m_commandBuffer, coverageToColorLocation );
  }
  // Dynamically sets the coverage modulation mode.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationModeNV( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationModeNV( m_commandBuffer, static_cast<VkCoverageModulationModeNV>( coverageModulationMode ) );
  }
  // Dynamically enables or disables use of the coverage modulation table.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableEnableNV( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable,
                                                                            Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationTableEnableNV( m_commandBuffer, static_cast<VkBool32>( coverageModulationTableEnable ) );
  }
  // Dynamically sets the coverage modulation table.
  // Raw-pointer overload: coverageModulationTableCount must match the float array's length.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( uint32_t      coverageModulationTableCount,
                                                                      const float * pCoverageModulationTable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: table length is taken from the ArrayProxy; no reinterpret_cast
  // needed since the element type is plain float.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageModulationTableNV( VULKAN_HPP_NAMESPACE::ArrayProxy<const float> const & coverageModulationTable,
                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageModulationTableNV( m_commandBuffer, coverageModulationTable.size(), coverageModulationTable.data() );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Dynamically enables or disables the shading rate image.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setShadingRateImageEnableNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable,
                                                                     Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetShadingRateImageEnableNV( m_commandBuffer, static_cast<VkBool32>( shadingRateImageEnable ) );
  }
  // Dynamically enables or disables the representative fragment test.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setRepresentativeFragmentTestEnableNV( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable,
                                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetRepresentativeFragmentTestEnableNV( m_commandBuffer, static_cast<VkBool32>( representativeFragmentTestEnable ) );
  }
  // Dynamically sets the coverage reduction mode.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::setCoverageReductionModeNV( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode,
                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    // Dispatcher/header version consistency check.
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdSetCoverageReductionModeNV( m_commandBuffer, static_cast<VkCoverageReductionModeNV>( coverageReductionMode ) );
  }
  19246. //=== VK_EXT_shader_module_identifier ===
  // Queries the identifier of a shader module (VK_EXT_shader_module_identifier).
  // Raw-pointer overload: the result is written through pIdentifier.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule                shaderModule,
                                                               VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT * pIdentifier,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the identifier by value instead of via an out-pointer.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
    Device::getShaderModuleIdentifierEXT( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Local result filled in by the driver, then returned by value.
    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
    d.vkGetShaderModuleIdentifierEXT( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
    return identifier;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Queries the identifier a shader module would have if created from pCreateInfo,
  // without actually creating the module. Raw-pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo,
                                                                         VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT *    pIdentifier,
                                                                         Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetShaderModuleCreateInfoIdentifierEXT(
      m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( pIdentifier ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the create info by reference and returns the identifier by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT
    Device::getShaderModuleCreateInfoIdentifierEXT( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo,
                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT identifier;
    d.vkGetShaderModuleCreateInfoIdentifierEXT(
      m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<VkShaderModuleIdentifierEXT *>( &identifier ) );
    return identifier;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19288. //=== VK_NV_optical_flow ===
  // Enumerates image formats usable with an optical flow session (VK_NV_optical_flow).
  // Raw-pointer overload following the standard Vulkan two-call idiom: pass a null
  // pImageFormatProperties to query the count, then a sized array to fetch the properties.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
    PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo,
                                                  uint32_t *                                                 pFormatCount,
                                                  VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV * pImageFormatProperties,
                                                  Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>(
      d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
                                                      reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( pOpticalFlowImageFormatInfo ),
                                                      pFormatCount,
                                                      reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( pImageFormatProperties ) ) );
  }
  19303. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: runs the two-call enumeration loop internally and returns the
  // full property list as a vector (default-constructed allocator).
  template <typename OpticalFlowImageFormatPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
    PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties;
    uint32_t                                                                                                          formatCount;
    VkResult                                                                                                          result;
    do
    {
      // First call: query the element count only (null data pointer).
      result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
        m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && formatCount )
      {
        // Second call: fetch the actual properties into the sized vector.
        imageFormatProperties.resize( formatCount );
        result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
                                                                 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
                                                                 &formatCount,
                                                                 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
      }
      // VK_INCOMPLETE means the count changed between the two calls; retry.
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
    // Shrink in case the driver returned fewer elements than initially reported.
    if ( formatCount < imageFormatProperties.size() )
    {
      imageFormatProperties.resize( formatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }
  // Enhanced-mode overload taking a caller-supplied vector allocator; otherwise identical
  // to the default-allocator overload (two-call enumeration with VK_INCOMPLETE retry).
  template <typename OpticalFlowImageFormatPropertiesNVAllocator,
            typename Dispatch,
            typename B1,
            typename std::enable_if<std::is_same<typename B1::value_type, OpticalFlowImageFormatPropertiesNV>::value, int>::type>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
    typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator>>::type
    PhysicalDevice::getOpticalFlowImageFormatsNV( const VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV & opticalFlowImageFormatInfo,
                                                  OpticalFlowImageFormatPropertiesNVAllocator &               opticalFlowImageFormatPropertiesNVAllocator,
                                                  Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    // Result vector constructed with the caller's allocator.
    std::vector<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV, OpticalFlowImageFormatPropertiesNVAllocator> imageFormatProperties(
      opticalFlowImageFormatPropertiesNVAllocator );
    uint32_t formatCount;
    VkResult result;
    do
    {
      // First call: query the element count only (null data pointer).
      result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
        m_physicalDevice, reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ), &formatCount, nullptr );
      if ( ( result == VK_SUCCESS ) && formatCount )
      {
        // Second call: fetch the actual properties into the sized vector.
        imageFormatProperties.resize( formatCount );
        result = d.vkGetPhysicalDeviceOpticalFlowImageFormatsNV( m_physicalDevice,
                                                                 reinterpret_cast<const VkOpticalFlowImageFormatInfoNV *>( &opticalFlowImageFormatInfo ),
                                                                 &formatCount,
                                                                 reinterpret_cast<VkOpticalFlowImageFormatPropertiesNV *>( imageFormatProperties.data() ) );
      }
      // VK_INCOMPLETE means the count changed between the two calls; retry.
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getOpticalFlowImageFormatsNV" );
    VULKAN_HPP_ASSERT( formatCount <= imageFormatProperties.size() );
    // Shrink in case the driver returned fewer elements than initially reported.
    if ( formatCount < imageFormatProperties.size() )
    {
      imageFormatProperties.resize( formatCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
  }
  19371. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Creates an optical flow session. Raw-pointer overload: the handle is written through pSession
  // and the raw VkResult is returned for the caller to check.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV * pCreateInfo,
                                                                                    const VULKAN_HPP_NAMESPACE::AllocationCallbacks *            pAllocator,
                                                                                    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV *                 pSession,
                                                                                    Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkCreateOpticalFlowSessionNV( m_device,
                                                                reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( pCreateInfo ),
                                                                reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
                                                                reinterpret_cast<VkOpticalFlowSessionNV *>( pSession ) ) );
  }
  19384. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the created session handle (result checked via resultCheck,
  // which throws or asserts on failure depending on the exception configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::type
    Device::createOpticalFlowSessionNV( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
                                        Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                        Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
    // Optional allocator lowers to a (possibly null) VkAllocationCallbacks pointer.
    VkResult                                   result = d.vkCreateOpticalFlowSessionNV(
      m_device,
      reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), session );
  }
#  ifndef VULKAN_HPP_NO_SMART_HANDLE
  // Smart-handle variant: wraps the created session in a UniqueHandle that destroys it
  // (with the same allocator and dispatcher) when the handle goes out of scope.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>>::type
    Device::createOpticalFlowSessionNVUnique( const VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV & createInfo,
                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks>    allocator,
                                              Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV session;
    VkResult                                   result = d.vkCreateOpticalFlowSessionNV(
      m_device,
      reinterpret_cast<const VkOpticalFlowSessionCreateInfoNV *>( &createInfo ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
      reinterpret_cast<VkOpticalFlowSessionNV *>( &session ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createOpticalFlowSessionNVUnique" );
    return createResultValueType(
      static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
      UniqueHandle<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV, Dispatch>( session, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  }
#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
  19421. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Destroys an optical flow session. Raw-pointer allocator overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV        session,
                                                              const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes an Optional allocator (defaulting to none) instead of a raw pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroyOpticalFlowSessionNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV                session,
                                                              Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyOpticalFlowSessionNV(
      m_device,
      static_cast<VkOpticalFlowSessionNV>( session ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Generic destroy() overload for OpticalFlowSessionNV handles; forwards to the same
  // driver entry point as destroyOpticalFlowSessionNV (used by smart-handle deleters).
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV        session,
                                          const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyOpticalFlowSessionNV( m_device, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode generic destroy() overload with an Optional allocator.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV                session,
                                          Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                          Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkDestroyOpticalFlowSessionNV(
      m_device,
      static_cast<VkOpticalFlowSessionNV>( session ),
      reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19464. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Non-enhanced variant (compiled only when enhanced mode is disabled): binds an image view
  // to an optical flow session binding point and returns the raw Result for the caller to check.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV             session,
                                                                                       VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                                                                                       VULKAN_HPP_NAMESPACE::ImageView                        view,
                                                                                       VULKAN_HPP_NAMESPACE::ImageLayout                      layout,
                                                                                       Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkBindOpticalFlowSessionImageNV( m_device,
                                                                   static_cast<VkOpticalFlowSessionNV>( session ),
                                                                   static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
                                                                   static_cast<VkImageView>( view ),
                                                                   static_cast<VkImageLayout>( layout ) ) );
  }
  19479. #else
  // Enhanced-mode variant: same binding operation, but the result is routed through
  // resultCheck (throwing or asserting on failure per the exception configuration).
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type
    Device::bindOpticalFlowSessionImageNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV             session,
                                           VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV bindingPoint,
                                           VULKAN_HPP_NAMESPACE::ImageView                        view,
                                           VULKAN_HPP_NAMESPACE::ImageLayout                      layout,
                                           Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VkResult result = d.vkBindOpticalFlowSessionImageNV( m_device,
                                                         static_cast<VkOpticalFlowSessionNV>( session ),
                                                         static_cast<VkOpticalFlowSessionBindingPointNV>( bindingPoint ),
                                                         static_cast<VkImageView>( view ),
                                                         static_cast<VkImageLayout>( layout ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindOpticalFlowSessionImageNV" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  }
  19497. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  // Records an optical flow execution into this command buffer. Raw-pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV              session,
                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV * pExecuteInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdOpticalFlowExecuteNV(
      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( pExecuteInfo ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: takes the execute info by reference, avoiding a nullable pointer.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void CommandBuffer::opticalFlowExecuteNV( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV             session,
                                                              const VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV & executeInfo,
                                                              Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkCmdOpticalFlowExecuteNV(
      m_commandBuffer, static_cast<VkOpticalFlowSessionNV>( session ), reinterpret_cast<const VkOpticalFlowExecuteInfoNV *>( &executeInfo ) );
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19518. //=== VK_KHR_maintenance5 ===
  19519. template <typename Dispatch>
  19520. VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer2KHR( VULKAN_HPP_NAMESPACE::Buffer buffer,
  19521. VULKAN_HPP_NAMESPACE::DeviceSize offset,
  19522. VULKAN_HPP_NAMESPACE::DeviceSize size,
  19523. VULKAN_HPP_NAMESPACE::IndexType indexType,
  19524. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19525. {
  19526. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19527. d.vkCmdBindIndexBuffer2KHR( m_commandBuffer,
  19528. static_cast<VkBuffer>( buffer ),
  19529. static_cast<VkDeviceSize>( offset ),
  19530. static_cast<VkDeviceSize>( size ),
  19531. static_cast<VkIndexType>( indexType ) );
  19532. }
  // Queries the optimal render-area granularity for a dynamic rendering configuration
  // (VK_KHR_maintenance5). Raw-pointer overload: result written through pGranularity.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR * pRenderingAreaInfo,
                                                                 VULKAN_HPP_NAMESPACE::Extent2D *                   pGranularity,
                                                                 Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetRenderingAreaGranularityKHR(
      m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( pRenderingAreaInfo ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the granularity extent by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D
    Device::getRenderingAreaGranularityKHR( const VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR & renderingAreaInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::Extent2D granularity;
    d.vkGetRenderingAreaGranularityKHR(
      m_device, reinterpret_cast<const VkRenderingAreaInfoKHR *>( &renderingAreaInfo ), reinterpret_cast<VkExtent2D *>( &granularity ) );
    return granularity;
  }
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Queries the subresource layout for a hypothetical image described by pInfo, without
  // creating the image. Raw-pointer overload: result written through pLayout.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR * pInfo,
                                                               VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR *               pLayout,
                                                               Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( pInfo ), reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
  }
  19563. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the subresource layout by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR
    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
    return layout;
  }
  // StructureChain overload: lets callers receive extension structs chained onto
  // SubresourceLayout2KHR; the chain's layout element is filled in place.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>
    Device::getImageSubresourceLayoutKHR( const VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
    // Write directly into the chain's SubresourceLayout2KHR link so its pNext chain is preserved.
    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR &    layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
    d.vkGetDeviceImageSubresourceLayoutKHR(
      m_device, reinterpret_cast<const VkDeviceImageSubresourceInfoKHR *>( &info ), reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
    return structureChain;
  }
  19585. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  // Queries the layout of a subresource of an existing image. Raw-pointer overload.
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout2KHR( VULKAN_HPP_NAMESPACE::Image                       image,
                                                                const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR * pSubresource,
                                                                VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR *      pLayout,
                                                                Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( pSubresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( pLayout ) );
  }
  19598. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  // Enhanced-mode overload: returns the subresource layout by value.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR Device::getImageSubresourceLayout2KHR(
    VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR layout;
    d.vkGetImageSubresourceLayout2KHR( m_device,
                                       static_cast<VkImage>( image ),
                                       reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
                                       reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
    return layout;
  }
  19611. template <typename X, typename Y, typename... Z, typename Dispatch>
  19612. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> Device::getImageSubresourceLayout2KHR(
  19613. VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource2KHR & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19614. {
  19615. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19616. VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...> structureChain;
  19617. VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR & layout = structureChain.template get<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>();
  19618. d.vkGetImageSubresourceLayout2KHR( m_device,
  19619. static_cast<VkImage>( image ),
  19620. reinterpret_cast<const VkImageSubresource2KHR *>( &subresource ),
  19621. reinterpret_cast<VkSubresourceLayout2KHR *>( &layout ) );
  19622. return structureChain;
  19623. }
  19624. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19625. //=== VK_EXT_shader_object ===
  19626. template <typename Dispatch>
  19627. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShadersEXT( uint32_t createInfoCount,
  19628. const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT * pCreateInfos,
  19629. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  19630. VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
  19631. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19632. {
  19633. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19634. return static_cast<Result>( d.vkCreateShadersEXT( m_device,
  19635. createInfoCount,
  19636. reinterpret_cast<const VkShaderCreateInfoEXT *>( pCreateInfos ),
  19637. reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ),
  19638. reinterpret_cast<VkShaderEXT *>( pShaders ) ) );
  19639. }
  19640. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19641. template <typename ShaderEXTAllocator, typename Dispatch>
  19642. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
  19643. Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
  19644. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19645. Dispatch const & d ) const
  19646. {
  19647. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19648. std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size() );
  19649. VkResult result =
  19650. d.vkCreateShadersEXT( m_device,
  19651. createInfos.size(),
  19652. reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
  19653. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  19654. reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
  19655. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
  19656. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
  19657. }
  19658. template <typename ShaderEXTAllocator,
  19659. typename Dispatch,
  19660. typename B0,
  19661. typename std::enable_if<std::is_same<typename B0::value_type, ShaderEXT>::value, int>::type>
  19662. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator>>::type
  19663. Device::createShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
  19664. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19665. ShaderEXTAllocator & shaderEXTAllocator,
  19666. Dispatch const & d ) const
  19667. {
  19668. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19669. std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT, ShaderEXTAllocator> shaders( createInfos.size(), shaderEXTAllocator );
  19670. VkResult result =
  19671. d.vkCreateShadersEXT( m_device,
  19672. createInfos.size(),
  19673. reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
  19674. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  19675. reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
  19676. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXT" );
  19677. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaders );
  19678. }
  19679. template <typename Dispatch>
  19680. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderEXT>::type
  19681. Device::createShaderEXT( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
  19682. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19683. Dispatch const & d ) const
  19684. {
  19685. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19686. VULKAN_HPP_NAMESPACE::ShaderEXT shader;
  19687. VkResult result =
  19688. d.vkCreateShadersEXT( m_device,
  19689. 1,
  19690. reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
  19691. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  19692. reinterpret_cast<VkShaderEXT *>( &shader ) );
  19693. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXT" );
  19694. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shader );
  19695. }
  19696. # ifndef VULKAN_HPP_NO_SMART_HANDLE
// Creates a batch of shader objects and wraps each returned handle in a
// UniqueHandle that destroys it (with the same allocator) on scope exit.
// Note: resultCheck runs before the UniqueHandles are built, so on a thrown
// error the raw handles are NOT wrapped — this matches the sibling *Unique
// wrappers in this header.
template <typename Dispatch, typename ShaderEXTAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
  Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Raw handles are collected first, then transferred into UniqueHandles.
  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
  VkResult result =
    d.vkCreateShadersEXT( m_device,
                          createInfos.size(),
                          reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders;
  uniqueShaders.reserve( createInfos.size() );
  // A single deleter (device + allocator + dispatcher) is shared by all handles.
  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  for ( auto const & shader : shaders )
  {
    uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
// Allocator-taking variant of createShadersEXTUnique: identical behavior, but
// the returned vector of UniqueHandles is constructed with the caller-supplied
// allocator instance. B0 is SFINAE-constrained to allocators of UniqueHandle.
template <typename Dispatch,
          typename ShaderEXTAllocator,
          typename B0,
          typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<ShaderEXT, Dispatch>>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator>>::type
  Device::createShadersEXTUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT> const & createInfos,
                                  Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
                                  ShaderEXTAllocator & shaderEXTAllocator,
                                  Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  // Raw handles first; wrapped into UniqueHandles only after resultCheck.
  std::vector<VULKAN_HPP_NAMESPACE::ShaderEXT> shaders( createInfos.size() );
  VkResult result =
    d.vkCreateShadersEXT( m_device,
                          createInfos.size(),
                          reinterpret_cast<const VkShaderCreateInfoEXT *>( createInfos.data() ),
                          reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
                          reinterpret_cast<VkShaderEXT *>( shaders.data() ) );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShadersEXTUnique" );
  std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>, ShaderEXTAllocator> uniqueShaders( shaderEXTAllocator );
  uniqueShaders.reserve( createInfos.size() );
  // One shared deleter captures device, allocation callbacks, and dispatcher.
  ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  for ( auto const & shader : shaders )
  {
    uniqueShaders.push_back( UniqueHandle<ShaderEXT, Dispatch>( shader, deleter ) );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueShaders ) );
}
  19751. template <typename Dispatch>
  19752. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>>::type
  19753. Device::createShaderEXTUnique( const VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT & createInfo,
  19754. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19755. Dispatch const & d ) const
  19756. {
  19757. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19758. VULKAN_HPP_NAMESPACE::ShaderEXT shader;
  19759. VkResult result =
  19760. d.vkCreateShadersEXT( m_device,
  19761. 1,
  19762. reinterpret_cast<const VkShaderCreateInfoEXT *>( &createInfo ),
  19763. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ),
  19764. reinterpret_cast<VkShaderEXT *>( &shader ) );
  19765. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderEXTUnique" );
  19766. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ),
  19767. UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderEXT, Dispatch>( shader, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
  19768. }
  19769. # endif /* VULKAN_HPP_NO_SMART_HANDLE */
  19770. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19771. template <typename Dispatch>
  19772. VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
  19773. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  19774. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19775. {
  19776. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19777. d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  19778. }
  19779. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19780. template <typename Dispatch>
  19781. VULKAN_HPP_INLINE void Device::destroyShaderEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
  19782. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19783. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19784. {
  19785. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19786. d.vkDestroyShaderEXT( m_device,
  19787. static_cast<VkShaderEXT>( shader ),
  19788. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  19789. }
  19790. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19791. template <typename Dispatch>
  19792. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
  19793. const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator,
  19794. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19795. {
  19796. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19797. d.vkDestroyShaderEXT( m_device, static_cast<VkShaderEXT>( shader ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  19798. }
  19799. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19800. template <typename Dispatch>
  19801. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderEXT shader,
  19802. Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator,
  19803. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19804. {
  19805. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19806. d.vkDestroyShaderEXT( m_device,
  19807. static_cast<VkShaderEXT>( shader ),
  19808. reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
  19809. }
  19810. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19811. template <typename Dispatch>
  19812. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result
  19813. Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19814. {
  19815. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19816. return static_cast<Result>( d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), pDataSize, pData ) );
  19817. }
  19818. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Fetches a shader object's binary blob using the Vulkan two-call pattern:
// query the size, then fetch the bytes; retried while the driver reports
// VK_INCOMPLETE since the size may change between the calls.
template <typename Uint8_tAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> data;
  size_t dataSize;
  VkResult result;
  do
  {
    // First call (pData == nullptr): dataSize receives the required byte count.
    result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
    if ( ( result == VK_SUCCESS ) && dataSize )
    {
      data.resize( dataSize );
      // Second call: fill the buffer; dataSize is updated to the bytes written.
      result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
  VULKAN_HPP_ASSERT( dataSize <= data.size() );
  if ( dataSize < data.size() )
  {
    // Trim if the final size came back smaller than the buffer we allocated.
    data.resize( dataSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
// Allocator-taking variant of the shader-binary query; identical two-call
// enumeration protocol, but the byte vector is constructed with the supplied
// allocator. B1 is SFINAE-constrained to allocators of uint8_t.
template <typename Uint8_tAllocator,
          typename Dispatch,
          typename B1,
          typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type
Device::getShaderBinaryDataEXT( VULKAN_HPP_NAMESPACE::ShaderEXT shader, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
  size_t dataSize;
  VkResult result;
  do
  {
    // Size query first; fetch only on success with a non-zero size.
    result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, nullptr );
    if ( ( result == VK_SUCCESS ) && dataSize )
    {
      data.resize( dataSize );
      result = d.vkGetShaderBinaryDataEXT( m_device, static_cast<VkShaderEXT>( shader ), &dataSize, reinterpret_cast<void *>( data.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderBinaryDataEXT" );
  VULKAN_HPP_ASSERT( dataSize <= data.size() );
  if ( dataSize < data.size() )
  {
    data.resize( dataSize );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
}
  19872. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19873. template <typename Dispatch>
  19874. VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( uint32_t stageCount,
  19875. const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits * pStages,
  19876. const VULKAN_HPP_NAMESPACE::ShaderEXT * pShaders,
  19877. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19878. {
  19879. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19880. d.vkCmdBindShadersEXT(
  19881. m_commandBuffer, stageCount, reinterpret_cast<const VkShaderStageFlagBits *>( pStages ), reinterpret_cast<const VkShaderEXT *>( pShaders ) );
  19882. }
  19883. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19884. template <typename Dispatch>
  19885. VULKAN_HPP_INLINE void CommandBuffer::bindShadersEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderStageFlagBits> const & stages,
  19886. VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ShaderEXT> const & shaders,
  19887. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  19888. {
  19889. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19890. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  19891. VULKAN_HPP_ASSERT( stages.size() == shaders.size() );
  19892. # else
  19893. if ( stages.size() != shaders.size() )
  19894. {
  19895. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindShadersEXT: stages.size() != shaders.size()" );
  19896. }
  19897. # endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  19898. d.vkCmdBindShadersEXT( m_commandBuffer,
  19899. stages.size(),
  19900. reinterpret_cast<const VkShaderStageFlagBits *>( stages.data() ),
  19901. reinterpret_cast<const VkShaderEXT *>( shaders.data() ) );
  19902. }
  19903. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19904. //=== VK_QCOM_tile_properties ===
  19905. template <typename Dispatch>
  19906. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  19907. uint32_t * pPropertiesCount,
  19908. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
  19909. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19910. {
  19911. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19912. return static_cast<Result>( d.vkGetFramebufferTilePropertiesQCOM(
  19913. m_device, static_cast<VkFramebuffer>( framebuffer ), pPropertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
  19914. }
  19915. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19916. template <typename TilePropertiesQCOMAllocator, typename Dispatch>
  19917. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
  19918. Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Dispatch const & d ) const
  19919. {
  19920. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19921. std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties;
  19922. uint32_t propertiesCount;
  19923. VkResult result;
  19924. do
  19925. {
  19926. result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
  19927. if ( ( result == VK_SUCCESS ) && propertiesCount )
  19928. {
  19929. properties.resize( propertiesCount );
  19930. result = d.vkGetFramebufferTilePropertiesQCOM(
  19931. m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
  19932. }
  19933. } while ( result == VK_INCOMPLETE );
  19934. VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
  19935. if ( propertiesCount < properties.size() )
  19936. {
  19937. properties.resize( propertiesCount );
  19938. }
  19939. return properties;
  19940. }
  19941. template <typename TilePropertiesQCOMAllocator,
  19942. typename Dispatch,
  19943. typename B1,
  19944. typename std::enable_if<std::is_same<typename B1::value_type, TilePropertiesQCOM>::value, int>::type>
  19945. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator>>::type
  19946. Device::getFramebufferTilePropertiesQCOM( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer,
  19947. TilePropertiesQCOMAllocator & tilePropertiesQCOMAllocator,
  19948. Dispatch const & d ) const
  19949. {
  19950. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19951. std::vector<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM, TilePropertiesQCOMAllocator> properties( tilePropertiesQCOMAllocator );
  19952. uint32_t propertiesCount;
  19953. VkResult result;
  19954. do
  19955. {
  19956. result = d.vkGetFramebufferTilePropertiesQCOM( m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, nullptr );
  19957. if ( ( result == VK_SUCCESS ) && propertiesCount )
  19958. {
  19959. properties.resize( propertiesCount );
  19960. result = d.vkGetFramebufferTilePropertiesQCOM(
  19961. m_device, static_cast<VkFramebuffer>( framebuffer ), &propertiesCount, reinterpret_cast<VkTilePropertiesQCOM *>( properties.data() ) );
  19962. }
  19963. } while ( result == VK_INCOMPLETE );
  19964. VULKAN_HPP_ASSERT( propertiesCount <= properties.size() );
  19965. if ( propertiesCount < properties.size() )
  19966. {
  19967. properties.resize( propertiesCount );
  19968. }
  19969. return properties;
  19970. }
  19971. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19972. template <typename Dispatch>
  19973. VULKAN_HPP_INLINE Result Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo,
  19974. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM * pProperties,
  19975. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19976. {
  19977. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19978. return static_cast<Result>( d.vkGetDynamicRenderingTilePropertiesQCOM(
  19979. m_device, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( pProperties ) ) );
  19980. }
  19981. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  19982. template <typename Dispatch>
  19983. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::TilePropertiesQCOM
  19984. Device::getDynamicRenderingTilePropertiesQCOM( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19985. {
  19986. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  19987. VULKAN_HPP_NAMESPACE::TilePropertiesQCOM properties;
  19988. d.vkGetDynamicRenderingTilePropertiesQCOM(
  19989. m_device, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ), reinterpret_cast<VkTilePropertiesQCOM *>( &properties ) );
  19990. return properties;
  19991. }
  19992. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  19993. //=== VK_NV_low_latency2 ===
  19994. template <typename Dispatch>
  19995. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  19996. const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV * pSleepModeInfo,
  19997. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  19998. {
  19999. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20000. return static_cast<Result>(
  20001. d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( pSleepModeInfo ) ) );
  20002. }
  20003. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20004. template <typename Dispatch>
  20005. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setLatencySleepModeNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  20006. const VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV & sleepModeInfo,
  20007. Dispatch const & d ) const
  20008. {
  20009. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20010. VkResult result =
  20011. d.vkSetLatencySleepModeNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepModeInfoNV *>( &sleepModeInfo ) );
  20012. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setLatencySleepModeNV" );
  20013. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  20014. }
  20015. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20016. template <typename Dispatch>
  20017. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  20018. const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV * pSleepInfo,
  20019. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20020. {
  20021. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20022. return static_cast<Result>(
  20023. d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( pSleepInfo ) ) );
  20024. }
  20025. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20026. template <typename Dispatch>
  20027. VULKAN_HPP_INLINE typename ResultValueType<void>::type
  20028. Device::latencySleepNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::LatencySleepInfoNV & sleepInfo, Dispatch const & d ) const
  20029. {
  20030. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20031. VkResult result = d.vkLatencySleepNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkLatencySleepInfoNV *>( &sleepInfo ) );
  20032. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::latencySleepNV" );
  20033. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
  20034. }
  20035. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20036. template <typename Dispatch>
  20037. VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  20038. const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV * pLatencyMarkerInfo,
  20039. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20040. {
  20041. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20042. d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
  20043. }
  20044. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20045. template <typename Dispatch>
  20046. VULKAN_HPP_INLINE void Device::setLatencyMarkerNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  20047. const VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV & latencyMarkerInfo,
  20048. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20049. {
  20050. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20051. d.vkSetLatencyMarkerNV( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkSetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
  20052. }
  20053. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20054. template <typename Dispatch>
  20055. VULKAN_HPP_INLINE void Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain,
  20056. uint32_t * pTimingCount,
  20057. VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV * pLatencyMarkerInfo,
  20058. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20059. {
  20060. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20061. d.vkGetLatencyTimingsNV(
  20062. m_device, static_cast<VkSwapchainKHR>( swapchain ), pTimingCount, reinterpret_cast<VkGetLatencyMarkerInfoNV *>( pLatencyMarkerInfo ) );
  20063. }
  20064. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20065. template <typename Dispatch>
  20066. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::pair<uint32_t, VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV>
  20067. Device::getLatencyTimingsNV( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20068. {
  20069. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20070. std::pair<uint32_t, VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV> data_;
  20071. uint32_t & timingCount = data_.first;
  20072. VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV & latencyMarkerInfo = data_.second;
  20073. d.vkGetLatencyTimingsNV(
  20074. m_device, static_cast<VkSwapchainKHR>( swapchain ), &timingCount, reinterpret_cast<VkGetLatencyMarkerInfoNV *>( &latencyMarkerInfo ) );
  20075. return data_;
  20076. }
  20077. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20078. template <typename Dispatch>
  20079. VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV * pQueueTypeInfo,
  20080. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20081. {
  20082. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20083. d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( pQueueTypeInfo ) );
  20084. }
  20085. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20086. template <typename Dispatch>
  20087. VULKAN_HPP_INLINE void Queue::notifyOutOfBandNV( const VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV & queueTypeInfo,
  20088. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20089. {
  20090. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20091. d.vkQueueNotifyOutOfBandNV( m_queue, reinterpret_cast<const VkOutOfBandQueueTypeInfoNV *>( &queueTypeInfo ) );
  20092. }
  20093. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20094. //=== VK_KHR_cooperative_matrix ===
  20095. template <typename Dispatch>
  20096. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesKHR(
  20097. uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20098. {
  20099. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20100. return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
  20101. m_physicalDevice, pPropertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( pProperties ) ) );
  20102. }
  20103. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
// Enumerates cooperative-matrix properties via the usual two-call pattern
// (count query, then fill), retrying while the driver reports VK_INCOMPLETE;
// the checked result and the property vector are returned together.
template <typename CooperativeMatrixPropertiesKHRAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type
PhysicalDevice::getCooperativeMatrixPropertiesKHR( Dispatch const & d ) const
{
  VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties;
  uint32_t propertyCount;
  VkResult result;
  do
  {
    // First call (pProperties == nullptr): propertyCount receives the count.
    result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
    if ( ( result == VK_SUCCESS ) && propertyCount )
    {
      properties.resize( propertyCount );
      // Second call: fill; propertyCount is updated to what was written.
      result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
        m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) );
    }
  } while ( result == VK_INCOMPLETE );
  resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
  VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  if ( propertyCount < properties.size() )
  {
    // Trim if fewer entries came back than the buffer we allocated.
    properties.resize( propertyCount );
  }
  return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
}
  20131. template <typename CooperativeMatrixPropertiesKHRAllocator,
  20132. typename Dispatch,
  20133. typename B1,
  20134. typename std::enable_if<std::is_same<typename B1::value_type, CooperativeMatrixPropertiesKHR>::value, int>::type>
  20135. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE
  20136. typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator>>::type
  20137. PhysicalDevice::getCooperativeMatrixPropertiesKHR( CooperativeMatrixPropertiesKHRAllocator & cooperativeMatrixPropertiesKHRAllocator,
  20138. Dispatch const & d ) const
  20139. {
  20140. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20141. std::vector<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR, CooperativeMatrixPropertiesKHRAllocator> properties(
  20142. cooperativeMatrixPropertiesKHRAllocator );
  20143. uint32_t propertyCount;
  20144. VkResult result;
  20145. do
  20146. {
  20147. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( m_physicalDevice, &propertyCount, nullptr );
  20148. if ( ( result == VK_SUCCESS ) && propertyCount )
  20149. {
  20150. properties.resize( propertyCount );
  20151. result = d.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(
  20152. m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesKHR *>( properties.data() ) );
  20153. }
  20154. } while ( result == VK_INCOMPLETE );
  20155. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getCooperativeMatrixPropertiesKHR" );
  20156. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  20157. if ( propertyCount < properties.size() )
  20158. {
  20159. properties.resize( propertyCount );
  20160. }
  20161. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  20162. }
  20163. #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
  20164. //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
  20165. template <typename Dispatch>
  20166. VULKAN_HPP_INLINE void CommandBuffer::setAttachmentFeedbackLoopEnableEXT( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask,
  20167. Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  20168. {
  20169. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20170. d.vkCmdSetAttachmentFeedbackLoopEnableEXT( m_commandBuffer, static_cast<VkImageAspectFlags>( aspectMask ) );
  20171. }
#if defined( VK_USE_PLATFORM_SCREEN_QNX )
  //=== VK_QNX_external_memory_screen_buffer ===

  // C-style wrapper: queries properties of a QNX _screen_buffer and writes them
  // through the caller-provided pProperties pointer; returns the raw dispatch result.
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getScreenBufferPropertiesQNX( const struct _screen_buffer * buffer,
                                                                                      VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX * pProperties,
                                                                                      Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    return static_cast<Result>( d.vkGetScreenBufferPropertiesQNX( m_device, buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( pProperties ) ) );
  }
  20182. # ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  20183. template <typename Dispatch>
  20184. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::type
  20185. Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
  20186. {
  20187. VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
  20188. VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX properties;
  20189. VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
  20190. resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
  20191. return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
  20192. }
  // Structure-chain variant: the caller selects ScreenBufferPropertiesQNX plus any
  // chained extension structures via the template arguments. The properties are
  // written into the chain's ScreenBufferPropertiesQNX element (presumably the
  // implementation walks its pNext chain, which StructureChain wires up — standard
  // Vulkan pattern) and the whole chain is returned.
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::StructureChain<X, Y, Z...>>::type
    Device::getScreenBufferPropertiesQNX( const struct _screen_buffer & buffer, Dispatch const & d ) const
  {
    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
    StructureChain<X, Y, Z...> structureChain;
    // Fill in place: take a reference to the chain element rather than a local copy.
    VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>();
    VkResult result = d.vkGetScreenBufferPropertiesQNX( m_device, &buffer, reinterpret_cast<VkScreenBufferPropertiesQNX *>( &properties ) );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getScreenBufferPropertiesQNX" );
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
  }
# endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
  20206. } // namespace VULKAN_HPP_NAMESPACE
  20207. #endif